Autoregressive Models

Imports

In [1]:
import sys
sys.path.insert(0, '../src/')

import warnings
warnings.filterwarnings('ignore')

%matplotlib inline

from datetime import date
import geopandas as gpd
from IPython.display import display, HTML
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from pandas.plotting import lag_plot
from pandas.plotting import autocorrelation_plot
from statsmodels.tsa.ar_model import AR
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf
from utils import load_pkl, generate_times
import seaborn as sns; sns.set()
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import StratifiedKFold
from metrics import *

from preprocessing import normalize

import tqdm as tqdm
from tqdm.autonotebook import tqdm
tqdm.pandas()

# Imports classes
from Baseline import *
from Regressor import *
from utils import *

from IPython.core.interactiveshell import InteractiveShell
InteractiveShell.ast_node_interactivity = "all"

Loading Data

Contour Iris

In [2]:
contour_iris = gpd.read_file(
    '../datasets/iris/iris.shp')

convert_to_int = ['dep', 'insee_com', 'iris', 'code_iris']
for col in convert_to_int:
    contour_iris[col] = contour_iris[col].astype(int)

contour_iris = contour_iris[['code_iris', 'geometry', 'dep']]
contour_iris.head();

Stations and Dates

In [3]:
station_data = pd.read_csv("../datasets/station_to_iris.csv")
station_data.describe();
In [4]:
stations_mode = load_pkl("../datasets/stations_mode.pkl")
subway_stations = [k for k, v in stations_mode.items() if v == 3]
print("Number of Subway stations: {}".format(len(subway_stations)))
Number of Subway stations: 303

Subways stations with less than $80000$ validations per $3$ month. Note that this is before we normalize the data. In the article, they removed $3$ subways stations, assuming that it was closed for renovation work. We printed below the $4$ stations with smaller number of validations.

In [5]:
station_data[(station_data['id'].isin(subway_stations)) & (station_data['validations_count'] < 80000)];
In [6]:
dates = pd.date_range(start="2015-10-01", end="2015-12-31").date

Discretized Matrix

In [7]:
matrix_6h = np.load("../datasets/6h_matrix.npy")
matrix_2h = np.load("../datasets/2h_matrix.npy")
matrix_15m = np.load("../datasets/15m_matrix.npy")

Data Analysis and Preprocessing

In [8]:
f, ax = plt.subplots(1, figsize=(16, 12))
ax = contour_iris[contour_iris['dep'].isin([75, 92, 93, 94])].plot(
    ax=ax, edgecolor='black', column='dep', cmap='icefire_r')
ax.scatter(station_data[station_data['id'].isin(subway_stations)]['x'],
           station_data[station_data['id'].isin(subway_stations)]['y'], color='firebrick', label='Subway Stations')
ax.set_xlabel('Longitude')
ax.set_ylabel('Latitude')
ax.set_title('Subway Stations in ÃŽle de France')
ax.legend()

plt.show();

Min Max Normalization

Below we apply Min Max Normalization to data, with a scale range of $[0, 1]$.

In [9]:
data_matrix_6h = pd.Panel(normalize(matrix_6h), 
                         items=dates, 
                         major_axis=subway_stations, 
                         minor_axis=generate_times("6h")
                        )

data_matrix_2h = pd.Panel(normalize(matrix_2h), 
                         items=dates, 
                         major_axis=subway_stations, 
                         minor_axis=generate_times("2h")
                        )

data_matrix_15m_complete = pd.Panel(matrix_15m, 
                                    items=dates, 
                                    major_axis=subway_stations, 
                                    minor_axis=generate_times("15min")
                                   )

Delete the first $4$ hours, from $00.00.00$ to $04.00.00$ because it's useless, the number of validations in that range is mostly equal to 0.

In [10]:
del_hours = 4
In [11]:
data_matrix_15m = data_matrix_15m_complete.iloc[:, :, del_hours*4:]
In [12]:
data_matrix_15m.to_frame().head()
Out[12]:
2015-10-01 2015-10-02 2015-10-03 2015-10-04 2015-10-05 2015-10-06 2015-10-07 2015-10-08 2015-10-09 2015-10-10 ... 2015-12-22 2015-12-23 2015-12-24 2015-12-25 2015-12-26 2015-12-27 2015-12-28 2015-12-29 2015-12-30 2015-12-31
major minor
198 04:00:00 0.0 3.0 0.0 0.0 0.0 0.0 0.0 2.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
04:15:00 0.0 0.0 0.0 0.0 2.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
04:30:00 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 1.0 1.0 2.0 1.0 0.0 1.0 0.0 0.0 0.0 0.0
04:45:00 1.0 2.0 0.0 2.0 1.0 0.0 1.0 0.0 0.0 0.0 ... 2.0 0.0 0.0 2.0 0.0 0.0 0.0 0.0 0.0 2.0
05:00:00 7.0 11.0 6.0 12.0 10.0 6.0 9.0 12.0 9.0 13.0 ... 11.0 9.0 7.0 1.0 5.0 10.0 12.0 10.0 10.0 5.0

5 rows × 92 columns

In [13]:
dmatrix_mean_6h = data_matrix_6h.mean()
dmatrix_mean_2h = data_matrix_2h.mean()
dmatrix_mean_15m = data_matrix_15m.mean()

dtmatrix_mean_6h = dmatrix_mean_6h.transpose()
dtmatrix_mean_2h = dmatrix_mean_2h.transpose()
dtmatrix_mean_15m = dmatrix_mean_15m.transpose()

Again, this is another way to print the stations with a small number of validations.

In [14]:
data_matrix_15m.mean(axis=0)[data_matrix_15m.mean(axis=0).sum(axis=1) < 810];
In [15]:
dmatrix_mean_15m.head()
dtmatrix_mean_15m.head()
Out[15]:
2015-10-01 2015-10-02 2015-10-03 2015-10-04 2015-10-05 2015-10-06 2015-10-07 2015-10-08 2015-10-09 2015-10-10 ... 2015-12-22 2015-12-23 2015-12-24 2015-12-25 2015-12-26 2015-12-27 2015-12-28 2015-12-29 2015-12-30 2015-12-31
04:00:00 0.112211 0.188119 0.036304 0.392739 0.072607 0.095710 0.099010 0.108911 0.108911 0.072607 ... 0.066007 0.102310 0.066007 0.029703 0.036304 0.013201 0.062706 0.089109 0.069307 0.075908
04:15:00 0.138614 0.138614 0.029703 0.554455 0.092409 0.082508 0.145215 0.072607 0.108911 0.072607 ... 0.075908 0.069307 0.056106 0.036304 0.013201 0.000000 0.085809 0.089109 0.062706 0.102310
04:30:00 0.174917 0.148515 0.059406 0.422442 0.075908 0.141914 0.141914 0.165017 0.105611 0.102310 ... 0.135314 0.135314 0.128713 0.112211 0.099010 0.079208 0.161716 0.108911 0.099010 0.079208
04:45:00 0.376238 0.462046 0.481848 1.072607 0.396040 0.501650 0.491749 0.584158 0.590759 0.478548 ... 0.468647 0.521452 0.442244 0.339934 0.346535 0.346535 0.432343 0.419142 0.409241 0.495050
05:00:00 2.643564 3.079208 2.458746 3.900990 2.973597 3.112211 3.293729 3.468647 3.254125 2.732673 ... 2.696370 3.207921 2.920792 1.217822 2.019802 1.795380 2.805281 2.993399 3.112211 2.798680

5 rows × 92 columns

Out[15]:
04:00:00 04:15:00 04:30:00 04:45:00 05:00:00 05:15:00 05:30:00 05:45:00 06:00:00 06:15:00 ... 21:30:00 21:45:00 22:00:00 22:15:00 22:30:00 22:45:00 23:00:00 23:15:00 23:30:00 23:45:00
2015-10-01 0.112211 0.138614 0.174917 0.376238 2.643564 19.864686 31.471947 31.874587 40.541254 48.481848 ... 70.927393 66.881188 65.399340 60.584158 60.755776 64.207921 75.128713 64.590759 52.138614 43.818482
2015-10-02 0.188119 0.138614 0.148515 0.462046 3.079208 22.399340 34.537954 34.650165 41.858086 50.056106 ... 78.590759 73.495050 70.907591 65.983498 66.211221 62.917492 66.330033 63.188119 57.755776 54.673267
2015-10-03 0.036304 0.029703 0.059406 0.481848 2.458746 14.254125 20.429043 19.184818 22.465347 24.531353 ... 82.211221 77.181518 73.755776 72.805281 74.570957 76.264026 83.947195 80.049505 76.587459 72.772277
2015-10-04 0.392739 0.554455 0.422442 1.072607 3.900990 15.920792 19.696370 16.963696 16.247525 17.610561 ... 43.570957 43.818482 41.745875 37.462046 36.722772 37.283828 52.759076 37.158416 31.211221 25.254125
2015-10-05 0.072607 0.092409 0.075908 0.396040 2.973597 20.858086 32.630363 32.729373 39.689769 52.072607 ... 61.039604 52.346535 53.125413 46.059406 45.498350 41.036304 41.254125 35.336634 28.003300 22.801980

5 rows × 80 columns

With Outliers

In [16]:
f, ax = plt.subplots(3, figsize=(16, 12))
ax1 = dtmatrix_mean_15m.plot(ax=ax[0], legend=False)
ax1.set_xticklabels([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('15min')

ax2 = dtmatrix_mean_2h.plot(ax=ax[1])
ax2.set_xticklabels([])
ax2.set_ylabel('Number of Validations')
ax2.set_title('2h')
ax2.legend(bbox_to_anchor=(1., 1.01))

ax3 = dtmatrix_mean_6h.plot(ax=ax[2])
ax3.set_xlabel('Days')
ax3.set_ylabel('Number of Validations')
ax3.set_title('6h')
ax3.legend(bbox_to_anchor=(1., 1.01))

plt.xticks(rotation=90)
plt.show();
In [17]:
f, ax = plt.subplots(3, figsize=(16, 12))
ax1 = dtmatrix_mean_15m.plot.area(ax=ax[0], legend=False)
ax1.set_xticklabels([])
ax1.set_ylabel('Time')
ax1.set_title('15min')

ax2 = dtmatrix_mean_2h.plot.area(ax=ax[1])
ax2.set_xticklabels([])
ax2.set_ylabel('Time')
ax2.set_title('2h')
ax2.legend(bbox_to_anchor=(1., 1.01))

ax3 = dtmatrix_mean_6h.plot.area(ax=ax[2])
ax3.set_xlabel('Days')
ax3.set_ylabel('Time')
ax3.set_title('6h')
ax3.legend(bbox_to_anchor=(1., 1.01), loc=2)

plt.xticks(rotation=90)
plt.show();
In [18]:
fig = plt.figure(figsize=(16, 6))
gs = gridspec.GridSpec(1, 1)
ax = fig.add_subplot(gs[0])
dmatrix_mean_15m.plot(ax=ax, legend=False)
plt.ylabel('Number of Validations')
plt.title('15min')

plt.xticks(rotation=90)
plt.show();
In [19]:
f, ax = plt.subplots(3, figsize=(16, 12))
ax1 = dmatrix_mean_15m.iloc[:, :31].plot(ax=ax[0])
ax1.set_xticklabels([])
ax1.set_ylabel('Days')
ax1.set_title('October\'s number of validations')
ax1.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)


ax2 = dmatrix_mean_15m.iloc[:, 31:61].plot(ax=ax[1])
ax2.set_xticklabels([])
ax2.set_ylabel('Days')
ax2.set_title('November\'s number of validations')
ax2.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)

ax3 = dmatrix_mean_15m.iloc[:, 61:].plot(ax=ax[2])
ax3.set_xlabel('Time')
ax3.set_ylabel('Days')
ax3.set_title('December\'s number of validations')
plt.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), loc=2,
           ncol=2, borderaxespad=0.)

plt.xticks(rotation=90)
plt.tight_layout()

plt.show();
In [20]:
f, ax = plt.subplots(2, figsize=(16, 12))

ax1 = dtmatrix_mean_15m.boxplot(return_type='both', ax=ax[0])
ax[0].set_xlabel("Time", fontsize=15)
ax[0].set_ylabel("Number of Validations", fontsize=15)

for tick in ax[0].get_xticklabels():
    tick.set_rotation(90)

ax2 = dmatrix_mean_15m.boxplot(return_type='both', ax=ax[1])
plt.xticks(rotation=90)

plt.tight_layout()
plt.show();

Defining useful variables

In [21]:
from __init__ import *
In [22]:
wd_15m = data_matrix_15m.loc[dict_w.values()]
wdm_15m = wd_15m.mean()
wdmt_15m = wdm_15m.transpose()

wd_15mf = data_matrix_15m.loc[dict_wd_final.values()]
wdm_15mf = wd_15mf.mean()
wdmt_15mf = wdm_15mf.transpose()

Without outliers

In [23]:
f, ax = plt.subplots(3, figsize=(16, 12))
ax1 = wdm_15m.loc[:, dict_wd_oct.values()].plot(ax=ax[0])
ax1.set_xticklabels([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('Octobre')
ax1.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)

ax2 = wdm_15m.loc[:, dict_wd_nov.values()].plot(ax=ax[1])
ax2.set_xticklabels([])
ax2.set_ylabel('Number of Validations')
ax2.set_title('Novembre')
ax2.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)

ax3 = wdm_15m.loc[:, dict_wd_dec.values()].plot(ax=ax[2])
ax3.set_xlabel('Time')
ax3.set_ylabel('Number of Validations')
ax3.set_title('Decembre')
plt.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), loc=2,
           ncol=2, borderaxespad=0.)

plt.xticks(rotation=90)
plt.tight_layout()

plt.show();
In [24]:
f, ax = plt.subplots(2, figsize=(16, 8))

ax1 = wdm_15mf.loc[:, dict_wd_novf.values()].plot(ax=ax[0])
ax1.set_xticks([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('Novembre')
ax1.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)

ax2 = wdm_15mf.loc[:, dict_wd_decf.values()].plot(ax=ax[1])
ax2.set_xlabel('Time')
ax2.set_ylabel('Number of Validations')
ax2.set_title('Decembre')
plt.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), loc=2,
           ncol=2, borderaxespad=0.)
plt.tight_layout()

plt.show();
In [25]:
f, ax = plt.subplots(2, figsize=(16, 12))

ax1 = wdmt_15mf.boxplot(return_type='both', ax=ax[0])
ax[0].set_xlabel("Time", fontsize=15)
ax[0].set_ylabel("Number of Validations", fontsize=15)

for tick in ax[0].get_xticklabels():
    tick.set_rotation(90)

ax2 = wdm_15mf.boxplot(return_type='both', ax=ax[1])
plt.xticks(rotation=90)

plt.tight_layout()
plt.show();
In [26]:
fig, (ax1, ax2) = plt.subplots(2, figsize=(16, 12))

wdm_15mf.plot(ax=ax1, legend=False)
ax1.set_ylabel('Number of Validations'); ax1.set_title('15min')

ax2 = wdmt_15mf.plot(ax=ax2, legend=False)
ax2.set_ylabel('Number of Validations'); ax2.set_title('15min')

plt.xticks(rotation=90)
plt.tight_layout()

plt.show();

Autocorrelation Plots

In [27]:
fig, (ax1, ax2) = plt.subplots(2, figsize=(16, 12))

autocorrelation_plot(wdmt_15mf.mean(), ax=ax1, c='blue')
ax1.set_title('15min discretization matrix')

plot_acf(wdmt_15mf.mean(), ax=ax2, c='blue', title='Auto Correlation')

plt.show();
In [28]:
plt.figure(figsize=(16, 7))

lag_plot(wdmt_15mf.mean(), c='blue')
plt.title('Lag plot 15min discretization matrix')

# plot_pacf(wdmt_15mf.mean(), ax=ax[1], c='blue', title='Partial Auto Correlation')

plt.show();

Splitting Data into Train and Test

In [29]:
# Without outliers with the weekend
dico = dict_w
# Without outliers and without weekend
# dico = dict_wd_final
size = 55
In [30]:
# Define X
X = data_matrix_15m.loc[dico.values()]

# Plot the station's id where the number of validations is less than the
# defined number 'nb_val' below
nb_val = 30
m = X.mean(axis=0).T.mean()
m[m < nb_val].sort_values().plot(kind='barh', grid=True, 
                                 cmap='viridis', 
                                 title="Subways where the mean number of"\
                                 "validations in 15 minutes is less than "\
                                 "{}".format(nb_val),
                                 figsize=(16, 6));

# Delete the stations where the mean number of validations is between 3 and 6
stations_to_del = [4113, 20868]
X.drop(columns=stations_to_del, inplace=True)
Out[30]:
<matplotlib.axes._subplots.AxesSubplot at 0x122f8f748>
In [31]:
Xm = X.mean()
Xmt = Xm.transpose()

try:
    for s in stations_to_del:
        subway_stations.remove(s)
except:
    pass
In [32]:
kw = list(dico.keys())
np.random.shuffle(kw)

vw = [dico[i] for i in kw]
In [33]:
ind_train = vw[:size]
ind_test = vw[size:]
X_train = X[sorted(ind_train)]
X_test = X[sorted(ind_test)]
In [34]:
X_train
X_test
Out[34]:
<class 'pandas.core.panel.Panel'>
Dimensions: 55 (items) x 301 (major_axis) x 80 (minor_axis)
Items axis: 2015-10-01 to 2015-12-20
Major_axis axis: 198 to 60982
Minor_axis axis: 04:00:00 to 23:45:00
Out[34]:
<class 'pandas.core.panel.Panel'>
Dimensions: 23 (items) x 301 (major_axis) x 80 (minor_axis)
Items axis: 2015-10-06 to 2015-12-19
Major_axis axis: 198 to 60982
Minor_axis axis: 04:00:00 to 23:45:00
In [35]:
X_min_max = X_train.apply(lambda x: (x.min().min(), x.max().max()), axis=(0, 2))
In [36]:
def normalize(X, X_min_max, station_id, a=0, b=1):
    """
    """
    mini, maxi = X_min_max.loc[station_id]
    return (a + ((X.loc[:, station_id] - mini) * (b - a)) / (maxi - mini)).values

def denormalize(X, X_min_max, station_id, a=0, b=1):
    """
    """
    mini, maxi = X_min_max.loc[station_id]
    return (((X.loc[:, station_id] - a) * (maxi - mini)) / (b - a) + mini).values
    
In [37]:
a, b = -1, 1
Xn_train = X_train.apply(lambda x: a + ((x - x.min().min()) * (b - a)) /(x.max().max() - x.min().min()), 
                         axis=(0, 2)).transpose(2, 0, 1)
In [38]:
# Normalize X_test
Xn_test = pd.Panel(np.array(list(map(lambda station_id: normalize(X_test, 
                                                                  X_min_max, 
                                                                  station_id, a=a, b=b), 
                                     X_test.transpose(1, 0, 2)))).transpose(2, 0, 1),
                   items=list(X_test.items),
                   major_axis=subway_stations,
                   minor_axis=generate_times("15min")[(del_hours * 4):])

# Denormalize X_test to verify that it is working
Xdn_test = pd.Panel(np.array(list(map(lambda station_id: denormalize(Xn_test, 
                                                                     X_min_max, 
                                                                     station_id, a=a, b=b), 
                                     Xn_test.transpose(1, 0, 2)))).transpose(2, 0, 1),
                   items=list(Xn_test.items),
                   major_axis=subway_stations,
                   minor_axis=generate_times("15min")[(del_hours * 4):])

Models

Baseline

In [39]:
def baseline_plot_results(levels):
    """
    
    """
    
    baseline_scores = []
    baseline_preds = []
    for level in levels:
        b = Baseline(level=level, first_ndays=5)
        b.fit(X_train)
        baseline_preds.append(b.predict(X_test))
        baseline_scores.append(b.score(X_test))
    
    df_baseline_scores = pd.DataFrame(np.array(baseline_scores).T,
                                 index=['R2', 'RMSE', 'MSE', 'MAE', 'MAPE', 'MPE'],
                                 columns=levels)
    display(HTML(df_baseline_scores.to_html()))
    pd.DataFrame(df_baseline_scores.loc['RMSE'].values.repeat(4).reshape(-1, 4).T,
                 columns=levels).plot(figsize=(16, 4), kind='line');
    
    return df_baseline_scores, baseline_preds
In [40]:
levels = ["None", "s"]
df_baseline_scores, baseline_preds = baseline_plot_results(levels)
None s
R2 0.076023 0.840334
RMSE 343.206946 142.669843
MSE 117791.007753 20354.683961
MAE 2.076667 0.541091
MAPE 207.666677 54.109098
MPE -184.812677 -27.527993
In [41]:
from cost_functions import mse, mse_g
from sklearn.linear_model import LinearRegression, Lasso

class myAR(Regressor):
    def __init__(self, order=4, level=None, loss=mse, loss_g=mse_g, max_iter=1000,
                 eps=0.01):
        """ Initialisation des paramètres du perceptron

        :param order: Taille de la fenêtre glissante
        :param loss: fonction de coût
        :param loss_g: gradient de la fonction coût
        :param max_iter: nombre maximum d'itération de la fonction coût
        :param eps: pas du gradient


        """

        self.order = order
        self.level = level
        self.max_iter, self.eps = max_iter, eps
        self.loss, self.loss_g = loss, loss_g
        self.w = np.random.random(self.order)
                      
    
    @Regressor.datax_decorator
    def analytic_fit(self, datax):
        """ Finds the optimal weigths analytically 
        
        :param datax: contient tous les exemples du dataset
        :returns: void
        :rtype: None
        
        """
        
        self.reg = LinearRegression()
        _, self.X, self.y = datax
        A, B = self.X.T.dot(self.X), self.X.T.dot(self.y)
        self.w1 = np.linalg.solve(A, B).ravel()
        self.reg.fit(self.X, self.y)
        self.w = self.reg.coef_.squeeze()
        display(HTML(pd.DataFrame(self.w.reshape(1, -1), index=['Weights'], 
                                  columns=range(1, len(self.w)+1)).to_html()))
        return self

    def minibatch_fit(self, datax):
        """ Mini-Batch gradient descent Learning

        :param datax: contient tous les exemples du dataset
        
        """

        for _ in range(self.max_iter):
            for d in range(datax.shape[0]):
                for t in range(datax.shape[2] - self.order):
                    batchx = datax.iloc[d, :, t:t + self.order].values
                    batchy = datax.iloc[d, :, t + self.order].values
                    self.w -= (self.eps * self.loss_g(batchx, batchy, self.w))
    
    def reshaped(self, y_pred, datax):
        """
        """
        
        if datax.ndim == 3:
            return y_pred.reshape((datax.shape[0] * datax.shape[1], 
                                datax.shape[2] - self.order), 
                                order='F').reshape((datax.shape[0],
                                                   datax.shape[1],
                                                   datax.shape[2] - self.order))
        elif datax.ndim == 2:
            return y_pred.reshape((datax.shape[0], datax.shape[1] - self.order),
                                order='F')
    
    @Regressor.datax_decorator
    def forecast(self, datax, tplus):
        """
        
        """
        
        datax, self.X_test, self.y_test = datax
        
        if tplus == 1:
            return self.reshaped(self.reg.predict(self.X_test), datax)
            
        else:
            self.X_test = self.X_test.reshape(datax.shape[-1] - self.order, -1, self.order)
        
            tmp = self.X_test[0]
            y_pred = self.reg.predict(tmp)
            pred = y_pred.copy()

            for x in self.X_test[1:]:
                x[:, -1] = pred.squeeze()
                x[:, -tplus:-1] = tmp[:, -tplus+1:]
                tmp = x.copy()
                pred = self.reg.predict(tmp)
                y_pred = np.vstack((y_pred, pred))

            return self.reshaped(y_pred, datax)
In [42]:
class ARMA2(myAR):
    def __init__(self, p, q, tplus, level=None, loss=mse, loss_g=mse_g, max_iter=1000,
                 eps=0.01, fit_intercept=True):
        """
        
        """
        
        super().__init__(p, level, loss, loss_g, max_iter, eps)
        self.p = p
        self.q = q
        self.tplus = tplus
        
        # q cannot be greater than t-1
        if self.q > self.tplus - 1:
            self.q = self.tplus - 1
            
        # For bias
        self.fit_intercept = fit_intercept
    
    @Regressor.datax_decorator
    def decorate_shape(self, datax):
        """
        """
        
        return datax
    
    @Regressor.datax_decorator
    def fit(self, datax):
        """
        
        """
        
        datax, self.X_train, self.y_train = datax
        
        # Fit on train data
        super().analytic_fit(datax)
        
        # Predict on train data
        y_pred_train = super().forecast(datax, self.tplus)
        
        # Compute residuals
        
        if datax.ndim == 3:
            residuals = y_pred_train - datax.iloc[:, :, self.p:].values

            # Add zeros to the residuals and convert it into panel to match datax
            zeros = np.zeros((residuals.shape[0], residuals.shape[1], self.p))
            residuals = np.concatenate((zeros, residuals), axis=2)
            pd_residuals =  pd.Panel(residuals,
                                     items=list(datax.items),
                                     major_axis=list(datax.major_axis),
                                     minor_axis=list(datax.minor_axis))
        elif datax.ndim == 2:
            residuals = y_pred_train - datax.iloc[:, self.p:].values

            # Add zeros to the residuals and convert it into panel to match datax
            zeros = np.zeros((residuals.shape[0], self.p))
            residuals = np.concatenate((zeros, residuals), axis=1)
            pd_residuals =  pd.DataFrame(residuals,
                                         index=list(datax.index),
                                         columns=list(datax.columns))
                                             
        # Reshaping X_err_train to match X_train
        residuals, X_err_train, y_err_train = self.decorate_shape(pd_residuals)
        
        # Add self.q errors to X_train
        self.X_train = self.X_train if self.q == 0 else np.concatenate(
            (self.X_train, X_err_train[:, -self.q:]), axis=1)
        
        # Fit new training data (with residuals)
        self.reg = LinearRegression(fit_intercept=self.fit_intercept)
        self.reg.fit(self.X_train, self.y_train)
        self.w = self.reg.coef_.squeeze()
        display(HTML(pd.DataFrame(self.w.reshape(1, -1), index=['Weights'], 
                                  columns=range(1, len(self.w)+1)).to_html()))
        
        return self

    @Regressor.datax_decorator
    def forecast(self, datax):
        """
        
        """
        
        datax, self.X_test, self.y_test = datax
        zeros = np.zeros((self.X_test.shape[0], self.q))
        if self.tplus == 1:
            self.X_test = np.concatenate((self.X_test, zeros), axis=1)
            return self.reshaped(self.reg.predict(self.X_test), datax)
            
        else:
            zeros = zeros.reshape(datax.shape[-1] - self.order, -1, self.q)
            self.X_test = self.X_test.reshape(datax.shape[-1] - self.order, -1, self.order)
            self.X_test = np.concatenate((self.X_test, zeros), axis=2)
        
            tmp = self.X_test[0]
            y_pred = self.reg.predict(tmp)
            pred = y_pred.copy()

            for x in self.X_test[1:]:
                x[:, -1-self.q] = pred.squeeze()
                x[:, -self.tplus-self.q:-1-self.q] = tmp[:, -self.tplus+1-self.q:-self.q]
                tmp = x.copy()
                pred = self.reg.predict(tmp)
                y_pred = np.vstack((y_pred, pred))

            return self.reshaped(y_pred, datax)
        
class ARMA(myAR):
    def __init__(self, p, q, tplus, level=None, loss=mse, loss_g=mse_g, max_iter=1000,
                 eps=0.01, fit_intercept=True):
        """
        
        """
        
        super().__init__(p, level, loss, loss_g, max_iter, eps)
        self.p = p
        self.q = q
        self.tplus = tplus
        
        # q cannot be greater than p
        if self.q > self.p:
            self.q = self.p
            
        # For bias
        self.fit_intercept = fit_intercept
    
    @Regressor.datax_decorator
    def decorate_shape(self, datax):
        """
        """
        
        return datax
    
    @Regressor.datax_decorator
    def fit(self, datax):
        """
        
        """
        
        datax, self.X_train, self.y_train = datax
        
        # Fit on train data
        super().analytic_fit(datax)
        
        # Predict on train data
        y_pred_train = super().forecast(datax, self.tplus)
        
        # Compute residuals
        
        if datax.ndim == 3:
            residuals = y_pred_train - datax.iloc[:, :, self.p:].values

            # Add zeros to the residuals and convert it into panel to match datax
            zeros = np.zeros((residuals.shape[0], residuals.shape[1], self.p))
            residuals = np.concatenate((zeros, residuals), axis=2)
            pd_residuals =  pd.Panel(residuals,
                                     items=list(datax.items),
                                     major_axis=list(datax.major_axis),
                                     minor_axis=list(datax.minor_axis))
        elif datax.ndim == 2:
            residuals = y_pred_train - datax.iloc[:, self.p:].values

            # Add zeros to the residuals and convert it into panel to match datax
            zeros = np.zeros((residuals.shape[0], self.p))
            residuals = np.concatenate((zeros, residuals), axis=1)
            pd_residuals =  pd.DataFrame(residuals,
                                         index=list(datax.index),
                                         columns=list(datax.columns))
                                             
        # Reshaping X_err_train to match X_train
        residuals, X_err_train, y_err_train = self.decorate_shape(pd_residuals)
        
        # Add self.q errors to X_train
        self.X_train = self.X_train if self.q == 0 else np.concatenate(
            (self.X_train, X_err_train[:, :self.q]), axis=1)
        
        # Fit new training data (with residuals)
        self.reg = LinearRegression(fit_intercept=self.fit_intercept)
        self.reg.fit(self.X_train, self.y_train)
        self.w = self.reg.coef_.squeeze()
        display(HTML(pd.DataFrame(self.w.reshape(1, -1), index=['Weights'], 
                                  columns=range(1, len(self.w)+1)).to_html()))
        
        return self

    @Regressor.datax_decorator
    def forecast(self, datax):
        """
        
        """
        
        datax, self.X_test, self.y_test = datax
        zeros = np.zeros((self.X_test.shape[0], self.q))
        if self.tplus == 1:
            self.X_test = np.concatenate((self.X_test, zeros), axis=1)
            return self.reshaped(self.reg.predict(self.X_test), datax)
            
        else:
            zeros = zeros.reshape(datax.shape[-1] - self.order, -1, self.q)
            self.X_test = self.X_test.reshape(datax.shape[-1] - self.order, -1, self.order)
            self.X_test = np.concatenate((self.X_test, zeros), axis=2)
        
            tmp = self.X_test[0]
            y_pred = self.reg.predict(tmp)
            pred = y_pred.copy()

            for x in self.X_test[1:]:
                x[:, -1-self.q] = pred.squeeze()
                x[:, -self.tplus - self.q:-1 - self.q] = tmp[:, -self.tplus + 1 - self.q:-self.q]
                tmp = x.copy()
                pred = self.reg.predict(tmp)
                y_pred = np.vstack((y_pred, pred))

            return self.reshaped(y_pred, datax)
In [43]:
def panelIt(X_pred, X_test, order, subway_stations, del_hours=0):
    """

    """

    wd_testorder_15m = X_test.iloc[:, :, order:]
    minor_axis = generate_times("15min")[(del_hours * 4) + order:]
    
    return pd.Panel(X_pred,
                    items=list(wd_testorder_15m.items),
                    major_axis=subway_stations,
                    minor_axis=minor_axis)
In [44]:
class theAR(Baseline):
    station_id = 0
    def __init__(self, level=None, first_ndays=7, **kwargs):
        """
        
        """
        
        super().__init__(level, first_ndays)
        self.kwargs = kwargs
        
    def fit(self, datax):
        """
        
        """
        
        if self.level is None:
            self.model = myAR(**self.kwargs)
            self.model.analytic_fit(datax)
            
        elif self.level.lower() == "s":
            
            self.models = []            
            
            datax.apply(lambda station: self.models.append(
                myAR(**self.kwargs).analytic_fit(station.T)), 
                        axis=(0, 2))
        
        elif self.level.lower() == "j":
            # TODO
            self.mean = []
            for d in range(self.first_ndays):
                exist_ind = list(set(ddict_days[d].values()) & set(datax.items))
                self.mean.append(datax[exist_ind].mean().mean(axis=1))      
                
        elif self.level.lower() == "sj":
            # TODO
            self.mean = []
            for d in range(self.first_ndays):
                exist_ind = list(set(ddict_days[d].values()) & set(datax.items))
                self.mean.append(datax[exist_ind].mean(axis=0))
        else:
            raise ValueError("Unknown value for level attribute, \
            try: s, j, sj or None")
    
    
    def predict(self, datax, tplus=None):
        """
        
        """
        
        def predict_for_station(x, tplus):
            """
            """
            
            station_pred = self.models[self.station_id].forecast(x, tplus)
            self.station_id += 1
            
            return station_pred
        
        if self.level is None:
            
            X_pred = self.model.forecast(datax, tplus)
            return panelIt(X_pred, datax, self.model.order, subway_stations, del_hours)
            
        elif self.level.lower() == "s":
            
            X_pred = datax.apply(lambda x: predict_for_station(x.T, tplus), 
                                 axis=(0, 2)).transpose(1, 0, 2)
            self.station_id = 0
            
            return panelIt(X_pred.values, datax, self.models[0].order, subway_stations, del_hours)
        
        elif self.level.lower() == "j":
            # TODO
            pass
        elif self.level.lower() == "sj":
            # TODO
            pass
        else:
            raise ValueError("Unknown value for level attribute, \
            try: s, j, sj or None")
    
    
    def score(self, datax, X_pred, level):
        """
        """
        
        if level == None:
            self.scores = super().metrics_score(
                datax.iloc[:, :, self.model.order:], X_pred.values)
        elif level == 's':
            self.scores = super().metrics_score(
                datax.iloc[:, :, self.models[0].order:], X_pred.values)
            
        return self.scores
    
In [45]:
class theARMA(theAR):
    station_id = 0
    def __init__(self, p, q, tplus, level=None, first_ndays=7, **kwargs):
        """
        
        """
        
        super().__init__(level, first_ndays, **kwargs)
        self.p = p
        self.q = q
        self.tplus = tplus
        self.kwargs = kwargs
        
    def fit(self, datax):
        """
        
        """
        
        if self.level is None:
            self.model = ARMA(p=self.p, q=self.q, tplus=self.tplus)
            self.model.fit(datax)
            
        elif self.level.lower() == "s":
            
            self.models = []            
            
            datax.apply(lambda station: self.models.append(
                ARMA(p=self.p, q=self.q, tplus=self.tplus).fit(station.T)), 
                        axis=(0, 2))
        
        elif self.level.lower() == "j":
            # TODO
            self.mean = []
            for d in range(self.first_ndays):
                exist_ind = list(set(ddict_days[d].values()) & set(datax.items))
                self.mean.append(datax[exist_ind].mean().mean(axis=1))      
                
        elif self.level.lower() == "sj":
            # TODO
            self.mean = []
            for d in range(self.first_ndays):
                exist_ind = list(set(ddict_days[d].values()) & set(datax.items))
                self.mean.append(datax[exist_ind].mean(axis=0))
        else:
            raise ValueError("Unknown value for level attribute, \
            try: s, j, sj or None")
    
    
    def predict(self, datax):
        """
        
        """
        
        def predict_for_station(x):
            """
            """
            
            station_pred = self.models[self.station_id].forecast(x)
            self.station_id += 1
            
            return station_pred
        
        if self.level is None:
            
            X_pred = self.model.forecast(datax)
            return panelIt(X_pred, datax, self.model.order, subway_stations, del_hours)
            
        elif self.level.lower() == "s":
            
            X_pred = datax.apply(lambda x: predict_for_station(x.T), 
                                 axis=(0, 2)).transpose(1, 0, 2)
            self.station_id = 0
            
            return panelIt(X_pred.values, datax, self.models[0].order, subway_stations, del_hours)
        
        elif self.level.lower() == "j":
            # TODO
            pass
        elif self.level.lower() == "sj":
            # TODO
            pass
        else:
            raise ValueError("Unknown value for level attribute, \
            try: s, j, sj or None")
In [46]:
def ar_plot_results(level, order, limit_t, X_train=Xn_train, X_test=Xn_test, 
                    X_min_max=X_min_max, a=a, b=b):
    """
    
    """
    
    ar_scores = []
    ar_preds = []
    ar = theAR(level=level, order=order)
    
    print("Fitting...")
    ar.fit(X_train)
    
    
    Xdn_test = pd.Panel(np.array(list(map(lambda station_id: 
                                          denormalize(X_test, 
                                                      X_min_max, 
                                                      station_id, a=a, b=b), 
                                     X_test.transpose(1, 0, 2)))).transpose(2, 0, 1),
                   items=list(X_test.items),
                   major_axis=subway_stations,
                   minor_axis=generate_times("15min")[(del_hours * 4):])
    
    print("Predicting...")

    for t in range(1, limit_t+1):
        X_pred = ar.predict(X_test, t)
        
        Xdn_pred = pd.Panel(np.array(list(map(lambda station_id: 
                                              denormalize(X_pred, 
                                                         X_min_max, 
                                                         station_id, a=a, b=b),
                                              X_pred.transpose(1, 0, 2)))).transpose(2, 0, 1),
                                     items=list(X_pred.items),
                                     major_axis=list(X_pred.major_axis),
                                     minor_axis=list(X_pred.minor_axis))
        
        ar_preds.append(Xdn_pred)
        
        ar_scores.append(ar.score(Xdn_test, Xdn_pred, level))
    
    display(HTML((pd.DataFrame(np.array(ar_scores).T, 
                               index=['R2', 'RMSE', 'MSE', 'MAE', 'MAPE', 'MPE'], 
                               columns=list(map(
                                   lambda x: "t+"+str(x),
                                   range(1, len(ar_scores)+1))))).to_html()))
    
    
    return ar_preds, ar_scores

def arma_plot_results(level, p, q, limit_t, X_train=Xn_train, X_test=Xn_test, 
                    X_min_max=X_min_max, a=a, b=b):
    """
    
    """
    
    ar_scores = []
    ar_preds = []
    
    Xdn_test = pd.Panel(np.array(list(map(lambda station_id: 
                                          denormalize(X_test, 
                                                      X_min_max, 
                                                      station_id, a=a, b=b), 
                                     X_test.transpose(1, 0, 2)))).transpose(2, 0, 1),
                   items=list(X_test.items),
                   major_axis=subway_stations,
                   minor_axis=generate_times("15min")[(del_hours * 4):])
    
    for t in range(1, limit_t+1):

        arma = theARMA(p, q, t, level=level)

        print("Fitting t+{}...".format(t))
        arma.fit(X_train)
        
        print("Predicting t+{}...".format(t))
        X_pred = arma.predict(X_test)
        
        Xdn_pred = pd.Panel(np.array(list(map(lambda station_id: 
                                              denormalize(X_pred, 
                                                         X_min_max, 
                                                         station_id, a=a, b=b),
                                              X_pred.transpose(1, 0, 2)))).transpose(2, 0, 1),
                                     items=list(X_pred.items),
                                     major_axis=list(X_pred.major_axis),
                                     minor_axis=list(X_pred.minor_axis))
        
        ar_preds.append(Xdn_pred)
        
        ar_scores.append(arma.score(Xdn_test, Xdn_pred, level))
    
    display(HTML((pd.DataFrame(np.array(ar_scores).T, 
                               index=['R2', 'RMSE', 'MSE', 'MAE', 'MAPE', 'MPE'], 
                               columns=list(map(
                                   lambda x: "t+"+str(x),
                                   range(1, len(ar_scores)+1))))).to_html()))
    
    
    return ar_preds, ar_scores


def plot_qualitative_analysis(ar_preds, X_test, limit_t, order, subway_stations, del_hours):
    """
    
    """
    
    fig, ax = plt.subplots(limit_t+1, figsize=(16, limit_t*4))

    wd_testorder_15m = X_test.iloc[:, :, order:]
    wdm_testorder_15m = wd_testorder_15m.mean()

    wdm_testorder_15m.plot(ax=ax[0])
    ax[0].set_ylabel('Number of Validations')
    ax[0].set_title('Test')
    ax[0].legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
               borderaxespad=0.)

    for i in range(limit_t):
        pred_t = ar_preds[i].mean()
        pred_t.plot(ax=ax[i+1])
        ax[i+1].set_ylabel('Number of Validations')
        ax[i+1].set_title("Predict t+{}".format(i+1))
        ax[i+1].legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
                     borderaxespad=0.)

    plt.tight_layout()
    plt.show();
    
def plot_specific(X_test, baseline_preds, ar_preds, ar_preds_s, order, limit_t, j, s):
    
    fig, ax = plt.subplots(limit_t, figsize=(16, limit_t*5))

    for t in range(limit_t):
        ar_preds[t].iloc[j, s].plot(ax=ax[t], label='General AR')
        ar_preds_s[t].iloc[j, s].plot(ax=ax[t], label='AR By Station')
        X_test.iloc[j, s].plot(ax=ax[t], label="Real values")
        baseline_preds[0].iloc[j, s].plot(ax=ax[t], style=['.--'], label='General Baseline')
        baseline_preds[1].iloc[j, s].plot(ax=ax[t], style=['.--'], label='Baseline per station')
        ax[t].set_ylabel('Number of Validations')
        ax[t].set_title("AR models at t+{} with an order of {}".format(t+1, order))
        ax[t].legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=1, loc=2,
                     borderaxespad=0.)

    plt.tight_layout()
    plt.show();
    
def plot_bispecific(X_test, baseline_preds, ar_preds, ar_preds_s, order, limit_t, j, s):
    
    fig, ax = plt.subplots(limit_t, 2, figsize=(16, limit_t*5))

    for t in range(limit_t):
        ar_preds[t].iloc[j, s].plot(ax=ax[t][0], label='General AR')
        ar_preds_s[t].iloc[j, s].plot(ax=ax[t][1], label='AR By Station')
        for c in range(2):
            X_test.iloc[j, s].plot(ax=ax[t][c], label="Real values")
            baseline_preds[0].iloc[j, s].plot(ax=ax[t][c], style=['.--'], label='General Baseline')
            baseline_preds[1].iloc[j, s].plot(ax=ax[t][c], style=['.--'], label='Baseline per station')
            ax[t][c].set_ylabel('Number of Validations')
            ax[t][c].set_title("AR models at t+{} with an order of {}".format(t+1, order))
            ax[t][c].legend(bbox_to_anchor=(0.68, 0.9, 1.1, .102), ncol=1, loc=2,
                            borderaxespad=0.)
        

    plt.tight_layout()
    plt.show();
    
def plot_diff_along_time(X_test, ar_preds):
    
    order = X_test.shape[2] - ar_preds[0].shape[2]    
    res = []
    for t in range(len(ar_preds)):
        res.append(np.sqrt(((X_test.iloc[:, :, order:].values - 
                           ar_preds[t].values)**2).mean(axis=(0, 1))))
    
    pd_res = pd.DataFrame(np.array(res).T, index=list(ar_preds[t].minor_axis),
                          columns=list(map(lambda x: "t+"+str(x),
                                           range(1, len(ar_preds)+1))))
    pd_res.plot(subplots=True, figsize=(16, len(ar_preds)*2), 
                title='Plot of RMSE between predicted and real values along days')
                                                    

Must solve it !

AR

In [47]:
order, limit_t = 4, 4
In [48]:
%%time
ar_preds, ar_scores = ar_plot_results(None, order, limit_t)
Fitting...
1 2 3 4
Weights -0.07191 -0.14902 0.289492 0.849837
Predicting...
t+1 t+2 t+3 t+4
R2 0.957309 -9.536362e+04 -0.580966 0.488863
RMSE 75.311650 1.125609e+05 458.305684 260.592995
MSE 5671.844689 1.266996e+10 210044.100208 67908.709103
MAE 0.363100 5.873412e+02 2.542961 1.411893
MAPE 36.310028 5.873412e+04 254.296111 141.189291
MPE -21.190840 1.358043e+04 -85.950406 -105.486529
CPU times: user 4.84 s, sys: 2.44 s, total: 7.28 s
Wall time: 6.43 s
In [49]:
plot_diff_along_time(X_test, ar_preds)
In [50]:
%%time
ar_preds_s, ar_scores_s = ar_plot_results("s", order, limit_t)
Fitting...
1 2 3 4
Weights -0.255585 -0.01136 0.313564 0.818924
1 2 3 4
Weights -0.055682 0.053627 0.145081 0.731377
1 2 3 4
Weights -0.086472 -0.290653 0.199221 1.072153
1 2 3 4
Weights -0.074532 -0.256497 0.560001 0.652402
1 2 3 4
Weights 0.04503 -0.041891 0.158835 0.788967
1 2 3 4
Weights -0.071104 -0.204754 0.120356 1.038711
1 2 3 4
Weights -0.08688 -0.142759 0.415225 0.736634
1 2 3 4
Weights -0.185678 -0.066327 0.101592 1.049717
1 2 3 4
Weights -0.129815 -0.097025 0.078189 1.056087
1 2 3 4
Weights -0.021637 -0.243946 -0.082888 1.25544
1 2 3 4
Weights -0.158904 -0.222866 0.646848 0.591004
1 2 3 4
Weights -0.041966 -0.098744 0.246228 0.797511
1 2 3 4
Weights 0.302188 -0.254555 0.42137 0.454084
1 2 3 4
Weights -0.033364 -0.239709 0.502055 0.69789
1 2 3 4
Weights 0.015932 -0.166423 0.240362 0.848702
1 2 3 4
Weights -0.061307 -0.197824 0.167681 0.994309
1 2 3 4
Weights -0.080071 -0.155193 0.237343 0.907938
1 2 3 4
Weights -0.054032 0.094842 0.238382 0.609204
1 2 3 4
Weights -0.136526 -0.028739 0.178496 0.911827
1 2 3 4
Weights 0.036465 -0.410082 0.254123 1.017688
1 2 3 4
Weights -0.138808 -0.140558 0.222161 0.9519
1 2 3 4
Weights -0.101423 -0.189763 0.264213 0.914141
1 2 3 4
Weights -0.069466 -0.255923 0.13 1.076936
1 2 3 4
Weights -0.185295 -0.016679 -0.074843 1.191167
1 2 3 4
Weights 0.009591 -0.300259 0.464256 0.756829
1 2 3 4
Weights -0.004418 -0.162062 0.325373 0.773807
1 2 3 4
Weights -0.253484 0.068496 0.305645 0.809106
1 2 3 4
Weights 0.051855 -0.234672 0.396711 0.712681
1 2 3 4
Weights -0.06007 -0.049043 0.263158 0.724431
1 2 3 4
Weights 0.121114 -0.398591 0.344617 0.847636
1 2 3 4
Weights -0.085934 -0.380161 0.471132 0.929467
1 2 3 4
Weights -0.022663 -0.364238 0.241989 1.040159
1 2 3 4
Weights -0.100184 -0.107912 0.18664 0.925849
1 2 3 4
Weights -0.05212 -0.216472 0.344732 0.843072
1 2 3 4
Weights 0.170159 0.113226 -0.024498 0.634672
1 2 3 4
Weights -0.030164 -0.123085 0.301298 0.791196
1 2 3 4
Weights -0.051505 0.010284 0.142652 0.797204
1 2 3 4
Weights -0.095866 -0.111411 0.232374 0.843406
1 2 3 4
Weights 0.039805 -0.386263 0.324459 0.959719
1 2 3 4
Weights -0.035947 -0.371006 0.345183 0.990275
1 2 3 4
Weights -0.000588 0.001096 0.316104 0.615533
1 2 3 4
Weights 0.117621 0.113051 0.004942 0.647582
1 2 3 4
Weights -0.139885 0.094827 0.219763 0.740501
1 2 3 4
Weights -0.150142 -0.012396 0.204563 0.840102
1 2 3 4
Weights -0.131747 -0.110322 0.28954 0.867265
1 2 3 4
Weights 0.055366 -0.559806 0.343571 1.066679
1 2 3 4
Weights -0.04913 -0.06709 0.20988 0.805025
1 2 3 4
Weights -0.064604 -0.19958 0.154244 1.043617
1 2 3 4
Weights -0.172486 -0.207997 0.384411 0.899286
1 2 3 4
Weights 0.280663 0.076554 0.075619 0.503552
1 2 3 4
Weights -0.023608 -0.217333 0.123052 1.039497
1 2 3 4
Weights -0.060199 -0.109318 0.389158 0.671495
1 2 3 4
Weights -0.103878 -0.049245 0.226935 0.853896
1 2 3 4
Weights 0.064955 -0.32729 0.273072 0.924667
1 2 3 4
Weights -0.217135 0.108121 0.38163 0.611587
1 2 3 4
Weights 0.046668 0.254702 -0.008694 0.567613
1 2 3 4
Weights -0.144928 -0.148965 0.15578 1.064264
1 2 3 4
Weights 0.102255 -0.094667 0.168106 0.722174
1 2 3 4
Weights 0.007507 -0.309365 0.068561 1.122165
1 2 3 4
Weights -0.056488 -0.093323 0.301315 0.759553
1 2 3 4
Weights -0.088228 -0.108627 0.095778 1.018565
1 2 3 4
Weights 0.118567 0.115323 0.059806 0.608699
1 2 3 4
Weights 0.202978 -0.116824 0.268232 0.583766
1 2 3 4
Weights -0.207456 0.030362 0.349792 0.726016
1 2 3 4
Weights 0.107526 0.034438 -0.100424 0.865525
1 2 3 4
Weights -0.06008 -0.23499 0.064896 1.127952
1 2 3 4
Weights -0.214086 -0.064483 0.353301 0.786902
1 2 3 4
Weights -0.11463 -0.124054 -0.082009 1.240807
1 2 3 4
Weights 0.051376 0.014064 0.042212 0.812644
1 2 3 4
Weights -0.014391 -0.274008 0.012119 1.158152
1 2 3 4
Weights -0.148375 0.06181 0.358565 0.615264
1 2 3 4
Weights -0.194388 -0.182617 0.506295 0.773552
1 2 3 4
Weights -0.099093 -0.024259 -0.100024 1.13517
1 2 3 4
Weights -0.140298 -0.083588 0.348142 0.721826
1 2 3 4
Weights 0.000159 0.000057 0.321995 0.602528
1 2 3 4
Weights 0.313583 0.032354 0.036196 0.393361
1 2 3 4
Weights -0.033068 -0.340202 0.600944 0.706937
1 2 3 4
Weights -0.231421 -0.092669 0.371942 0.87992
1 2 3 4
Weights -0.170143 -0.145339 0.233367 0.976749
1 2 3 4
Weights -0.010278 -0.010987 0.273435 0.612782
1 2 3 4
Weights -0.156512 -0.09238 0.328982 0.820704
1 2 3 4
Weights 0.004969 -0.458166 0.406672 0.938217
1 2 3 4
Weights -0.08685 -0.041725 -0.073327 1.100163
1 2 3 4
Weights 0.082665 -0.067993 -0.019753 0.882192
1 2 3 4
Weights -0.173816 -0.125638 0.145103 1.053924
1 2 3 4
Weights -0.034862 -0.347491 0.267008 0.990461
1 2 3 4
Weights -0.202948 -0.05138 0.256593 0.906465
1 2 3 4
Weights -0.111722 -0.078554 0.276436 0.806268
1 2 3 4
Weights -0.127425 -0.2547 0.324727 0.949023
1 2 3 4
Weights -0.036441 -0.147237 0.376229 0.700271
1 2 3 4
Weights 0.001857 -0.369751 0.133213 1.143233
1 2 3 4
Weights 0.010257 -0.499282 0.366416 1.01717
1 2 3 4
Weights -0.174195 -0.109095 0.2964 0.859678
1 2 3 4
Weights -0.05009 -0.178747 0.314264 0.865257
1 2 3 4
Weights -0.068862 -0.284481 0.388201 0.842867
1 2 3 4
Weights -0.047376 -0.222041 0.304954 0.869161
1 2 3 4
Weights -0.087775 -0.256047 0.165117 1.068152
1 2 3 4
Weights -0.146824 -0.275863 0.269877 1.048894
1 2 3 4
Weights -0.060379 -0.281576 0.46896 0.789163
1 2 3 4
Weights -0.027073 -0.477138 0.552234 0.857717
1 2 3 4
Weights -0.031205 -0.31744 0.479283 0.786598
1 2 3 4
Weights 0.019677 -0.192076 0.465378 0.644047
1 2 3 4
Weights -0.175035 -0.047698 0.165238 0.949378
1 2 3 4
Weights -0.013304 -0.260061 0.275381 0.953007
1 2 3 4
Weights -0.060145 -0.017365 0.121062 0.834593
1 2 3 4
Weights -0.11589 -0.02772 0.070577 0.980401
1 2 3 4
Weights -0.137855 0.019047 0.257351 0.773571
1 2 3 4
Weights -0.14242 -0.224934 0.231638 1.035725
1 2 3 4
Weights -0.096865 -0.089163 0.420466 0.653252
1 2 3 4
Weights -0.216429 -0.066451 0.32581 0.825182
1 2 3 4
Weights -0.127089 0.076313 0.103513 0.835885
1 2 3 4
Weights -0.011454 -0.396302 0.542473 0.810253
1 2 3 4
Weights -0.02051 -0.120254 0.443025 0.604001
1 2 3 4
Weights -0.0634 -0.119702 0.168568 0.880941
1 2 3 4
Weights -0.207299 0.008564 0.188248 0.930583
1 2 3 4
Weights -0.08164 -0.116078 0.343197 0.786344
1 2 3 4
Weights -0.075242 -0.131763 0.221482 0.890717
1 2 3 4
Weights -0.068986 -0.028425 0.320641 0.627344
1 2 3 4
Weights -0.060834 -0.215924 0.224918 0.916893
1 2 3 4
Weights 0.091637 -0.310991 0.271896 0.852402
1 2 3 4
Weights -0.210401 0.005046 0.215832 0.866152
1 2 3 4
Weights -0.017376 -0.19665 0.259883 0.842372
1 2 3 4
Weights -0.054596 -0.365346 0.436941 0.896725
1 2 3 4
Weights 0.080526 0.15308 0.030096 0.690539
1 2 3 4
Weights -0.114153 -0.132032 0.283141 0.876998
1 2 3 4
Weights -0.054835 -0.059695 0.227531 0.839687
1 2 3 4
Weights -0.07124 -0.243637 0.520031 0.724902
1 2 3 4
Weights -0.054432 -0.368236 0.621986 0.736435
1 2 3 4
Weights -0.018126 -0.327098 0.336516 0.909628
1 2 3 4
Weights 0.193591 0.182285 0.049013 0.493012
1 2 3 4
Weights -0.144758 -0.147211 0.23333 0.955593
1 2 3 4
Weights -0.067341 -0.261541 0.271173 0.983236
1 2 3 4
Weights -0.16143 0.020023 0.100289 0.952162
1 2 3 4
Weights 0.097577 -0.406603 0.420521 0.816355
1 2 3 4
Weights -0.129531 -0.047099 0.209134 0.929257
1 2 3 4
Weights -0.103135 -0.349689 0.353397 0.982907
1 2 3 4
Weights -0.024491 -0.172015 0.233097 0.917904
1 2 3 4
Weights 0.024492 -0.06428 0.278425 0.669044
1 2 3 4
Weights 0.015033 -0.168414 0.214371 0.868737
1 2 3 4
Weights -0.118449 -0.236705 0.163722 1.085047
1 2 3 4
Weights -0.13618 -0.043511 0.238105 0.868153
1 2 3 4
Weights -0.123087 -0.172625 0.298519 0.880914
1 2 3 4
Weights -0.135197 -0.166029 0.241105 0.954836
1 2 3 4
Weights 0.005223 -0.168892 0.367337 0.683269
1 2 3 4
Weights -0.093757 -0.061781 0.219476 0.841735
1 2 3 4
Weights -0.0473 -0.257459 0.333606 0.907321
1 2 3 4
Weights -0.18871 0.051217 -0.058647 1.107192
1 2 3 4
Weights -0.012666 -0.372996 0.494787 0.807934
1 2 3 4
Weights -0.198087 -0.100607 0.285524 0.922427
1 2 3 4
Weights -0.037168 -0.240259 0.19016 0.998595
1 2 3 4
Weights 0.10371 -0.262809 0.371145 0.678856
1 2 3 4
Weights -0.15825 -0.076183 0.298341 0.855888
1 2 3 4
Weights -0.17039 -0.017288 0.305664 0.785368
1 2 3 4
Weights -0.032295 -0.09653 0.340541 0.677644
1 2 3 4
Weights -0.012808 -0.052476 0.28308 0.717141
1 2 3 4
Weights -0.128861 -0.215211 0.118351 1.113698
1 2 3 4
Weights -0.098565 -0.06401 0.12425 0.919392
1 2 3 4
Weights -0.14249 0.102455 0.066075 0.912533
1 2 3 4
Weights 0.065655 -0.285342 0.304309 0.865765
1 2 3 4
Weights -0.073317 -0.280684 0.32631 0.940181
1 2 3 4
Weights -0.142418 0.038219 0.085639 0.943043
1 2 3 4
Weights -0.207308 -0.041345 0.335196 0.814874
1 2 3 4
Weights -0.028374 -0.226345 0.491865 0.692961
1 2 3 4
Weights -0.080706 -0.158375 0.200527 0.960518
1 2 3 4
Weights -0.016394 -0.124353 0.319456 0.769229
1 2 3 4
Weights -0.133893 0.020416 0.360506 0.653255
1 2 3 4
Weights -0.140065 -0.140432 0.265255 0.950562
1 2 3 4
Weights -0.104139 -0.107252 0.229987 0.880197
1 2 3 4
Weights -0.195781 -0.069816 0.216669 0.94942
1 2 3 4
Weights -0.214544 -0.128757 0.256784 0.997251
1 2 3 4
Weights -0.16037 -0.11492 0.153685 1.023577
1 2 3 4
Weights -0.221777 -0.163819 0.326647 0.960096
1 2 3 4
Weights -0.089778 -0.135533 0.201065 0.905062
1 2 3 4
Weights -0.125584 -0.181293 0.013866 1.213356
1 2 3 4
Weights -0.019365 -0.11979 0.245406 0.80118
1 2 3 4
Weights -0.163535 -0.143294 0.282095 0.937828
1 2 3 4
Weights -0.0407 -0.240993 0.326819 0.900722
1 2 3 4
Weights -0.202487 -0.193891 0.381398 0.901241
1 2 3 4
Weights -0.062713 0.278859 -0.538007 1.160583
1 2 3 4
Weights -0.208089 -0.135435 0.45353 0.748619
1 2 3 4
Weights 0.099728 -0.37421 0.63946 0.561694
1 2 3 4
Weights -0.147911 -0.223315 0.328781 0.924289
1 2 3 4
Weights -0.064857 -0.057518 0.321597 0.737255
1 2 3 4
Weights 0.16209 0.031063 -0.004958 0.53419
1 2 3 4
Weights -0.007037 -0.284018 0.384895 0.79564
1 2 3 4
Weights -0.048177 -0.322875 0.104892 1.212352
1 2 3 4
Weights -0.107267 -0.119918 0.258366 0.878488
1 2 3 4
Weights -0.037997 -0.35202 0.466327 0.823829
1 2 3 4
Weights -0.030031 -0.004559 0.05903 0.874914
1 2 3 4
Weights -0.228435 -0.063043 0.233921 0.967644
1 2 3 4
Weights -0.117999 -0.124718 0.246435 0.950663
1 2 3 4
Weights -0.093605 -0.006906 0.102649 0.876906
1 2 3 4
Weights -0.129545 -0.023044 0.18386 0.844481
1 2 3 4
Weights -0.092807 -0.131776 0.282139 0.89545
1 2 3 4
Weights -0.009498 -0.311705 0.412706 0.824386
1 2 3 4
Weights -0.128412 -0.118041 0.323202 0.825309
1 2 3 4
Weights -0.161916 -0.163053 0.330386 0.87085
1 2 3 4
Weights 0.010511 0.052463 0.053912 0.704616
1 2 3 4
Weights -0.126994 -0.19504 0.219551 1.019476
1 2 3 4
Weights -0.115298 -0.20497 0.425823 0.816829
1 2 3 4
Weights 0.002651 -0.348249 0.358977 0.939376
1 2 3 4
Weights -0.108273 -0.129958 0.033109 1.106449
1 2 3 4
Weights -0.044944 -0.26287 0.476066 0.758691
1 2 3 4
Weights -0.013013 -0.317016 0.426181 0.842591
1 2 3 4
Weights -0.159968 -0.172638 0.334498 0.881469
1 2 3 4
Weights -0.037341 0.013818 0.223824 0.635338
1 2 3 4
Weights 0.100711 -0.191716 0.329805 0.692442
1 2 3 4
Weights -0.208243 -0.148594 0.549474 0.67443
1 2 3 4
Weights -0.141898 -0.110924 0.232945 0.927262
1 2 3 4
Weights 0.381675 0.062528 0.063654 0.421444
1 2 3 4
Weights 0.029428 -0.45095 0.512332 0.853102
1 2 3 4
Weights -0.032708 -0.262768 0.405787 0.806008
1 2 3 4
Weights -0.083386 -0.227836 0.200279 1.03655
1 2 3 4
Weights 0.015592 -0.129297 0.162262 0.860778
1 2 3 4
Weights 0.025628 -0.33195 0.415192 0.805635
1 2 3 4
Weights 0.053593 0.12952 0.066834 0.606904
1 2 3 4
Weights -0.035522 -0.112468 0.319545 0.772909
1 2 3 4
Weights -0.090998 -0.277814 0.439076 0.858399
1 2 3 4
Weights -0.178236 -0.155317 0.165772 1.076691
1 2 3 4
Weights -0.03619 -0.406361 0.481499 0.863265
1 2 3 4
Weights -0.133475 -0.06212 0.189467 0.9403
1 2 3 4
Weights -0.172166 -0.05671 0.170552 0.962851
1 2 3 4
Weights -0.00556 -0.395592 0.505087 0.830839
1 2 3 4
Weights -0.000324 0.01582 0.15117 0.694887
1 2 3 4
Weights 0.128607 -0.114295 0.245675 0.676194
1 2 3 4
Weights -0.137674 -0.072332 0.140423 0.960372
1 2 3 4
Weights 0.024927 -0.328772 0.376807 0.868938
1 2 3 4
Weights -0.185035 -0.036549 0.217049 0.916408
1 2 3 4
Weights -0.243279 -0.045304 0.310472 0.889702
1 2 3 4
Weights -0.022376 -0.132982 0.280192 0.818175
1 2 3 4
Weights -0.182001 -0.180653 0.320769 0.925469
1 2 3 4
Weights -0.069672 -0.105255 0.324256 0.781141
1 2 3 4
Weights -0.016895 -0.23254 0.153256 0.958345
1 2 3 4
Weights 0.022481 -0.131577 0.21244 0.760523
1 2 3 4
Weights -0.248839 -0.142512 0.462355 0.841014
1 2 3 4
Weights 0.004716 -0.273489 0.594216 0.588842
1 2 3 4
Weights -0.121799 0.023751 0.167091 0.856263
1 2 3 4
Weights -0.095559 -0.27724 0.229249 1.051423
1 2 3 4
Weights -0.124513 -0.065585 0.129546 0.959842
1 2 3 4
Weights -0.039269 -0.211149 0.208379 0.955195
1 2 3 4
Weights -0.15437 -0.128932 0.363735 0.810107
1 2 3 4
Weights -0.176825 0.003974 0.257393 0.82991
1 2 3 4
Weights 0.245072 -0.111408 0.129108 0.683725
1 2 3 4
Weights -0.053653 -0.156746 0.180478 0.906685
1 2 3 4
Weights -0.063273 -0.211343 0.440294 0.752009
1 2 3 4
Weights -0.040329 -0.014287 0.176309 0.811266
1 2 3 4
Weights -0.049439 -0.244092 0.255596 0.968896
1 2 3 4
Weights -0.032799 -0.357149 0.425715 0.911441
1 2 3 4
Weights -0.195576 -0.099886 0.398822 0.794825
1 2 3 4
Weights -0.037229 -0.371153 0.460452 0.836582
1 2 3 4
Weights -0.187042 -0.108681 0.179253 1.012636
1 2 3 4
Weights -0.068056 -0.07185 0.216421 0.842565
1 2 3 4
Weights 0.04445 0.063898 0.055683 0.763813
1 2 3 4
Weights -0.180262 -0.021293 0.266451 0.829585
1 2 3 4
Weights -0.224734 -0.079961 0.329663 0.877935
1 2 3 4
Weights -0.02929 -0.015031 0.279392 0.715913
1 2 3 4
Weights -0.026334 -0.363024 0.227977 1.061936
1 2 3 4
Weights 0.002243 -0.027064 0.152631 0.79091
1 2 3 4
Weights 0.057708 -0.333153 0.392261 0.839584
1 2 3 4
Weights -0.049498 -0.313568 0.177703 1.071589
1 2 3 4
Weights -0.06031 -0.040706 0.315503 0.662942
1 2 3 4
Weights -0.07099 -0.313046 0.515653 0.799314
1 2 3 4
Weights -0.006761 0.031234 0.114174 0.710793
1 2 3 4
Weights -0.00267 -0.395135 0.335507 0.973531
1 2 3 4
Weights -0.145198 -0.114883 0.197498 0.956512
1 2 3 4
Weights -0.029364 -0.206871 0.378952 0.747648
1 2 3 4
Weights -0.030138 -0.321158 0.504516 0.778814
1 2 3 4
Weights -0.131325 -0.153309 0.135789 1.04814
1 2 3 4
Weights -0.17053 -0.024801 0.203526 0.921763
1 2 3 4
Weights -0.078537 -0.147354 0.07966 1.073456
1 2 3 4
Weights -0.035369 -0.413796 0.355409 0.991736
1 2 3 4
Weights -0.072281 -0.208971 0.047599 1.136149
1 2 3 4
Weights -0.144512 0.028178 0.14538 0.871143
1 2 3 4
Weights -0.142803 -0.1101 0.340716 0.799687
1 2 3 4
Weights -0.124621 -0.258389 0.456455 0.807841
1 2 3 4
Weights -0.124354 -0.224497 0.26651 0.990473
1 2 3 4
Weights -0.190709 -0.005547 0.131807 0.982056
1 2 3 4
Weights -0.077748 0.023033 0.323605 0.705532
1 2 3 4
Weights -0.035876 -0.18452 0.314834 0.865413
1 2 3 4
Weights -0.047262 -0.002229 0.226202 0.70838
1 2 3 4
Weights 0.06832 -0.209011 0.25416 0.776763
1 2 3 4
Weights -0.113635 -0.131823 0.146742 0.998626
1 2 3 4
Weights -0.228127 -0.106717 0.231812 1.012897
1 2 3 4
Weights -0.035312 -0.445716 0.497934 0.925608
1 2 3 4
Weights -0.142696 0.205387 0.273841 0.485771
1 2 3 4
Weights -0.05705 -0.229491 0.407891 0.805639
1 2 3 4
Weights -0.19431 -0.187309 0.349044 0.965446
1 2 3 4
Weights -0.118332 -0.061904 0.073043 1.051207
1 2 3 4
Weights 0.02993 -0.395992 0.469445 0.844986
1 2 3 4
Weights -0.106968 -0.078205 0.389156 0.667093
1 2 3 4
Weights -0.078667 0.038441 0.139088 0.796303
1 2 3 4
Weights 0.056119 0.025977 0.18918 0.671473
1 2 3 4
Weights -0.046847 -0.174123 0.269078 0.860805
1 2 3 4
Weights 0.052456 -0.173319 0.358314 0.687485
1 2 3 4
Weights -0.157725 0.068374 0.119818 0.880075
1 2 3 4
Weights -0.015844 0.003139 0.041231 0.858716
1 2 3 4
Weights -0.011016 -0.191651 0.379528 0.746708
1 2 3 4
Weights -0.078963 -0.334799 0.335658 0.972491
1 2 3 4
Weights -0.134622 -0.24039 0.262206 1.015514
1 2 3 4
Weights -0.001953 -0.096623 0.319345 0.715625
1 2 3 4
Weights -0.157859 -0.320383 0.430045 0.96115
Predicting...
t+1 t+2 t+3 t+4
R2 0.962349 -8.725618e+12 -7.042451e+04 0.554545
RMSE 70.726932 1.076693e+09 9.672951e+04 243.274130
MSE 5002.298976 1.159269e+18 9.356598e+09 59182.302352
MAE 0.352292 4.573697e+05 1.171233e+02 1.186832
MAPE 35.229176 4.573697e+07 1.171233e+04 118.683199
MPE -19.865985 -3.755629e+07 6.364637e+03 -84.651823
CPU times: user 49.4 s, sys: 1.91 s, total: 51.3 s
Wall time: 42.8 s
In [51]:
plot_diff_along_time(X_test, ar_preds_s)
In [52]:
j, s = 2, 100
plot_bispecific(X_test, baseline_preds, ar_preds, ar_preds_s, order, limit_t, j, s)
In [53]:
plot_qualitative_analysis(ar_preds, X_test, limit_t, order, subway_stations, del_hours)
In [54]:
plot_qualitative_analysis(ar_preds_s, X_test, limit_t, order, subway_stations, del_hours)
In [55]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 'None'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(ar_scores).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="General baseline")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of General baseline and AR model, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);
In [56]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 's'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(ar_scores_s).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR per station")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="Baseline per station")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

Compute and Compare

In [57]:
fig, ax = plt.subplots(1, figsize=(16, 6))
ax.set_prop_cycle(color=['crimson', 'teal', 'b', 'darkgoldenrod'])
x = range(1, limit_t+1)
baseline_scores = df_baseline_scores.loc['RMSE'].values.repeat(limit_t).reshape(-1, limit_t).T
model_scores = np.vstack((np.array(ar_scores).T[1], np.array(ar_scores_s).T[1])).T
baselineObjects = plt.plot(x, baseline_scores, linewidth=3)
labels = ["General baseline", "Baseline per station", "AR", "AR per station"]
arlineObjects = plt.plot(x, model_scores, linewidth=3)

# ['D', '*', '|', 'X']
# labels = ["Full baseline", "Baseline per station", "Baseline per day",
#                    "Baseline per station and day", "Full AR", "AR per station"]
for i, m in zip(range(4), ['D', '*']):
    ax = plt.scatter(x, baseline_scores[:, i], marker=m, s=100)

for i, m in zip(range(2), ['D', '*']):
    ax = plt.scatter(x, model_scores[:, i], marker=m, s=100)
    
plt.legend(baselineObjects+arlineObjects, labels, prop={'size': 15})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

AR + Baseline

In [58]:
def baseline_sub(X, baseline):
    
    return X.apply(lambda x: x - baseline.iloc[0], axis=(1, 2))
    
def baseline_add(X, baseline):
    
    return X.apply(lambda x: x + baseline.iloc[0], axis=(1, 2))
    
In [59]:
order, limit_t = 4, 4
In [60]:
# Minus Baseline per station
Xb_train = baseline_sub(X_train, baseline_preds[1])
Xb_test = baseline_sub(X_test, baseline_preds[1])

# Minus Baseline per station
Xb_train_s = baseline_sub(X_train, baseline_preds[1])
Xb_test_s = baseline_sub(X_test, baseline_preds[1])
In [61]:
Xb_min_max_s = Xb_train_s.apply(lambda x: (x.min().min(), x.max().max()), axis=(0, 2))
In [62]:
Xbn_train_s = Xb_train_s.apply(lambda x: a + ((x - x.min().min()) * (b - a)) / (x.max().max() - x.min().min()), 
                               axis=(0, 2)).transpose(2, 0, 1)
# Normalize Xb_test
Xbn_test_s = pd.Panel(np.array(list(map(lambda station_id: normalize(
    Xb_test_s, Xb_min_max_s, station_id, a=a, b=b), 
                                        Xb_test_s.transpose(1, 0, 2)))).transpose(2, 0, 1),
                      items=list(Xb_test_s.items),
                      major_axis=subway_stations,
                      minor_axis=generate_times("15min")[(del_hours * 4):])

# Denormalize Xb_test
Xbdn_test_s = pd.Panel(np.array(list(map(lambda station_id: denormalize(
    Xbn_test_s, Xb_min_max_s, station_id, a=a, b=b), 
                                        Xb_test_s.transpose(1, 0, 2)))).transpose(2, 0, 1),
                      items=list(Xb_test_s.items),
                      major_axis=subway_stations,
                      minor_axis=generate_times("15min")[(del_hours * 4):])
In [63]:
%%time
arb_preds, arb_scores = ar_plot_results(None, order, limit_t, X_train=Xbn_train_s, 
                                      X_test=Xbn_test_s, X_min_max=Xb_min_max_s, a=a, b=b)
for t in range(limit_t):
    arb_preds[t] = baseline_add(arb_preds[t], baseline_preds[1])
Fitting...
1 2 3 4
Weights -0.02997 -0.007437 0.287751 0.695926
Predicting...
t+1 t+2 t+3 t+4
R2 8.459523e-01 -1.609069e+00 -1.107381e+00 -2.174787e-01
RMSE 5.721493e+01 2.354640e+02 2.116183e+02 1.608468e+02
MSE 3.273548e+03 5.544328e+04 4.478230e+04 2.587169e+04
MAE -2.346982e+10 -6.608786e+10 -5.697795e+10 -3.679000e+10
MAPE -2.346982e+12 -6.608786e+12 -5.697795e+12 -3.679000e+12
MPE 1.559500e+12 -1.148078e+12 -5.894448e+11 8.337539e+11
CPU times: user 4.96 s, sys: 2.27 s, total: 7.22 s
Wall time: 6.31 s
In [64]:
plot_diff_along_time(X_test, arb_preds)
In [65]:
%%time
arb_preds_s, arb_scores_s = ar_plot_results("s", order, limit_t,  
                                          X_train=Xbn_train_s, 
                                          X_test=Xbn_test_s,
                                          X_min_max=Xb_min_max_s, a=a, b=b)
for t in range(limit_t):
    arb_preds_s[t] = baseline_add(arb_preds_s[t], baseline_preds[1]) 
Fitting...
1 2 3 4
Weights -0.151558 0.102607 0.322026 0.601723
1 2 3 4
Weights 0.041706 0.073316 0.191044 0.529377
1 2 3 4
Weights -0.135022 -0.130593 0.250507 0.90533
1 2 3 4
Weights -0.031072 -0.044133 0.389985 0.573815
1 2 3 4
Weights 0.142115 0.090334 0.160761 0.562643
1 2 3 4
Weights -0.110814 -0.059495 0.192758 0.865118
1 2 3 4
Weights -0.028911 0.042138 0.312355 0.600317
1 2 3 4
Weights -0.161928 -0.029374 0.258065 0.82988
1 2 3 4
Weights -0.133476 -0.039783 0.206957 0.87954
1 2 3 4
Weights -0.082662 -0.161168 0.109018 1.038095
1 2 3 4
Weights -0.081539 -0.122675 0.520434 0.556939
1 2 3 4
Weights 0.008419 0.023854 0.23796 0.616796
1 2 3 4
Weights 0.330964 -0.107073 0.289914 0.401104
1 2 3 4
Weights 0.030721 -0.036177 0.370298 0.566179
1 2 3 4
Weights 0.028146 -0.042285 0.246306 0.710248
1 2 3 4
Weights -0.065209 -0.070725 0.198175 0.844521
1 2 3 4
Weights -0.064546 0.00717 0.237114 0.734168
1 2 3 4
Weights 0.060565 0.15032 0.233907 0.438679
1 2 3 4
Weights -0.074263 0.051188 0.314403 0.625617
1 2 3 4
Weights -0.042034 -0.192672 0.257845 0.878572
1 2 3 4
Weights -0.127013 -0.018267 0.273906 0.765638
1 2 3 4
Weights -0.114608 -0.058098 0.278996 0.774814
1 2 3 4
Weights -0.123301 -0.128929 0.234812 0.900728
1 2 3 4
Weights -0.174497 0.049936 0.039826 1.01033
1 2 3 4
Weights 0.006991 -0.06234 0.357582 0.636177
1 2 3 4
Weights 0.073911 -0.02157 0.273259 0.587016
1 2 3 4
Weights -0.105408 0.115143 0.31034 0.572212
1 2 3 4
Weights 0.064441 -0.022938 0.331934 0.54844
1 2 3 4
Weights 0.0006 0.021808 0.285301 0.566521
1 2 3 4
Weights 0.09071 -0.205097 0.288281 0.744782
1 2 3 4
Weights -0.123155 -0.144028 0.388214 0.8205
1 2 3 4
Weights -0.077814 -0.141947 0.209549 0.908939
1 2 3 4
Weights -0.033857 -0.021773 0.21141 0.750786
1 2 3 4
Weights -0.021585 -0.094092 0.285667 0.746826
1 2 3 4
Weights 0.232502 0.103579 0.068336 0.484251
1 2 3 4
Weights 0.034626 -0.059489 0.310781 0.653544
1 2 3 4
Weights 0.024371 0.091974 0.170086 0.609767
1 2 3 4
Weights -0.055857 0.012771 0.238266 0.6705
1 2 3 4
Weights -0.025987 -0.138191 0.244551 0.854979
1 2 3 4
Weights -0.087056 -0.183707 0.337376 0.867254
1 2 3 4
Weights 0.163586 0.129079 0.230739 0.34979
1 2 3 4
Weights 0.142299 0.10757 0.043735 0.590122
1 2 3 4
Weights -0.000269 0.154696 0.240995 0.520317
1 2 3 4
Weights -0.038769 0.035733 0.217108 0.662544
1 2 3 4
Weights -0.073953 0.019322 0.30043 0.666164
1 2 3 4
Weights -0.064199 -0.304735 0.339604 0.931938
1 2 3 4
Weights -0.009867 0.033798 0.205712 0.666018
1 2 3 4
Weights -0.115888 -0.042485 0.228415 0.859431
1 2 3 4
Weights -0.103669 -0.086075 0.409039 0.695837
1 2 3 4
Weights 0.279947 0.1414 0.120786 0.378696
1 2 3 4
Weights -0.056807 -0.076467 0.151931 0.913383
1 2 3 4
Weights 0.024659 0.052972 0.301532 0.496113
1 2 3 4
Weights -0.044313 0.03778 0.28864 0.64172
1 2 3 4
Weights 0.038347 -0.174266 0.255993 0.817104
1 2 3 4
Weights -0.066896 0.222874 0.372744 0.342339
1 2 3 4
Weights 0.146284 0.231238 0.068217 0.409804
1 2 3 4
Weights -0.170109 -0.0242 0.248691 0.864876
1 2 3 4
Weights 0.131925 -0.012648 0.170191 0.596569
1 2 3 4
Weights -0.01406 -0.227056 0.194473 0.92864
1 2 3 4
Weights 0.019684 0.026681 0.265989 0.588421
1 2 3 4
Weights -0.086181 -0.052615 0.175391 0.880703
1 2 3 4
Weights 0.19486 0.109211 0.124588 0.456077
1 2 3 4
Weights 0.209718 0.053946 0.233974 0.430801
1 2 3 4
Weights -0.081314 0.114184 0.357417 0.493347
1 2 3 4
Weights 0.146211 0.037265 -0.06085 0.782657
1 2 3 4
Weights -0.103496 -0.128212 0.146924 0.991409
1 2 3 4
Weights -0.133481 0.047657 0.314221 0.629858
1 2 3 4
Weights -0.159652 -0.103037 0.152388 1.032889
1 2 3 4
Weights 0.098934 0.042118 0.124665 0.659295
1 2 3 4
Weights -0.100917 -0.141201 0.13915 0.980194
1 2 3 4
Weights -0.006025 0.16342 0.32425 0.38821
1 2 3 4
Weights -0.157787 0.00592 0.45189 0.610002
1 2 3 4
Weights -0.087775 -0.022848 0.096467 0.934897
1 2 3 4
Weights -0.093056 0.050131 0.3961 0.488717
1 2 3 4
Weights 0.138403 0.108002 0.246236 0.410265
1 2 3 4
Weights 0.27211 0.074957 0.064607 0.323308
1 2 3 4
Weights -0.020268 -0.112834 0.485786 0.58877
1 2 3 4
Weights -0.105679 -0.006017 0.375809 0.666016
1 2 3 4
Weights -0.165855 -0.0314 0.306357 0.787185
1 2 3 4
Weights 0.035608 0.102922 0.224701 0.481706
1 2 3 4
Weights -0.059948 0.038751 0.301804 0.602339
1 2 3 4
Weights -0.057999 -0.211968 0.356136 0.813837
1 2 3 4
Weights -0.090209 -0.002306 0.051968 0.938411
1 2 3 4
Weights 0.095461 -0.004522 0.055382 0.753705
1 2 3 4
Weights -0.145007 -0.06302 0.227848 0.883657
1 2 3 4
Weights -0.091667 -0.152019 0.306845 0.814158
1 2 3 4
Weights -0.174102 0.002888 0.369502 0.713418
1 2 3 4
Weights -0.049197 0.060146 0.218152 0.66191
1 2 3 4
Weights -0.138325 -0.084012 0.334861 0.786313
1 2 3 4
Weights -0.006345 0.014071 0.351431 0.535019
1 2 3 4
Weights -0.101724 -0.17953 0.205889 0.982587
1 2 3 4
Weights -0.081718 -0.219656 0.331621 0.872073
1 2 3 4
Weights -0.098332 0.004909 0.299049 0.674658
1 2 3 4
Weights -0.014306 -0.050175 0.234944 0.768784
1 2 3 4
Weights -0.087497 -0.097828 0.355432 0.721445
1 2 3 4
Weights -0.034128 -0.085276 0.292345 0.747642
1 2 3 4
Weights -0.163516 -0.077027 0.213886 0.917957
1 2 3 4
Weights -0.211266 -0.156702 0.44689 0.823182
1 2 3 4
Weights -0.073189 -0.062636 0.372848 0.686651
1 2 3 4
Weights -0.053298 -0.261212 0.45872 0.769659
1 2 3 4
Weights -0.042315 -0.082144 0.408387 0.633205
1 2 3 4
Weights 0.10406 0.006269 0.327012 0.493616
1 2 3 4
Weights -0.09996 0.011771 0.248274 0.742215
1 2 3 4
Weights -0.032106 -0.037823 0.230356 0.782455
1 2 3 4
Weights -0.002594 0.053935 0.148068 0.705889
1 2 3 4
Weights -0.071819 0.037002 0.174829 0.771685
1 2 3 4
Weights 0.027108 0.073774 0.237519 0.527074
1 2 3 4
Weights -0.17575 -0.095065 0.319119 0.850774
1 2 3 4
Weights -0.003475 0.084535 0.324177 0.488881
1 2 3 4
Weights -0.171262 0.006962 0.434752 0.606821
1 2 3 4
Weights -0.037645 0.095093 0.218427 0.609201
1 2 3 4
Weights -0.054119 -0.172073 0.467368 0.70887
1 2 3 4
Weights 0.07251 0.056277 0.324694 0.427193
1 2 3 4
Weights -0.041282 0.015196 0.165544 0.721604
1 2 3 4
Weights -0.109148 0.050325 0.248617 0.733931
1 2 3 4
Weights -0.019754 0.035493 0.285094 0.627787
1 2 3 4
Weights -0.074027 0.008725 0.251215 0.711182
1 2 3 4
Weights 0.046013 0.071709 0.262896 0.449969
1 2 3 4
Weights -0.086061 -0.066091 0.275091 0.738925
1 2 3 4
Weights 0.093937 -0.164092 0.247882 0.741627
1 2 3 4
Weights -0.11915 0.057285 0.265082 0.671072
1 2 3 4
Weights 0.011049 -0.099956 0.274566 0.708292
1 2 3 4
Weights -0.10023 -0.123662 0.365368 0.778231
1 2 3 4
Weights 0.155425 0.210131 0.082626 0.503319
1 2 3 4
Weights -0.072049 -0.010104 0.275715 0.724749
1 2 3 4
Weights 0.067417 0.070596 0.22334 0.566802
1 2 3 4
Weights 0.009285 -0.001087 0.3675 0.553527
1 2 3 4
Weights -0.05797 -0.13958 0.502899 0.634769
1 2 3 4
Weights -0.068474 -0.130223 0.320702 0.78545
1 2 3 4
Weights 0.214124 0.178319 0.066477 0.457221
1 2 3 4
Weights -0.126699 -0.02769 0.25708 0.789364
1 2 3 4
Weights -0.088993 -0.163904 0.337587 0.849452
1 2 3 4
Weights -0.057232 0.075208 0.136122 0.761927
1 2 3 4
Weights 0.102179 -0.146909 0.304346 0.630047
1 2 3 4
Weights -0.039102 0.030124 0.214324 0.753349
1 2 3 4
Weights -0.15802 -0.123562 0.313123 0.855601
1 2 3 4
Weights 0.025471 -0.012048 0.264894 0.662349
1 2 3 4
Weights 0.093436 0.080119 0.202577 0.519442
1 2 3 4
Weights 0.058889 -0.006286 0.159288 0.705727
1 2 3 4
Weights -0.17844 -0.073602 0.244737 0.902646
1 2 3 4
Weights -0.051088 0.029965 0.240842 0.700386
1 2 3 4
Weights -0.135808 -0.01957 0.30163 0.732022
1 2 3 4
Weights -0.144302 -0.018847 0.305812 0.755223
1 2 3 4
Weights 0.097015 0.011277 0.23231 0.54798
1 2 3 4
Weights -0.036868 0.075414 0.297563 0.574904
1 2 3 4
Weights -0.039481 -0.047052 0.275727 0.748387
1 2 3 4
Weights -0.1199 0.002019 0.159226 0.874048
1 2 3 4
Weights -0.040132 -0.14469 0.41152 0.700602
1 2 3 4
Weights -0.188986 -0.033629 0.426532 0.707989
1 2 3 4
Weights -0.051117 -0.069268 0.202685 0.834437
1 2 3 4
Weights 0.148618 -0.110545 0.32427 0.538748
1 2 3 4
Weights -0.074811 0.046766 0.358784 0.580302
1 2 3 4
Weights -0.093824 0.056659 0.361218 0.586619
1 2 3 4
Weights 0.037503 0.046425 0.320908 0.433889
1 2 3 4
Weights 0.106388 0.060823 0.227957 0.538444
1 2 3 4
Weights -0.196461 -0.069495 0.232181 0.924986
1 2 3 4
Weights -0.065101 -0.009502 0.186195 0.776962
1 2 3 4
Weights -0.018101 0.106673 0.184192 0.668408
1 2 3 4
Weights 0.055547 -0.153588 0.283512 0.76108
1 2 3 4
Weights -0.080843 -0.107799 0.302413 0.787889
1 2 3 4
Weights -0.086512 0.076501 0.162654 0.749818
1 2 3 4
Weights -0.146135 0.067748 0.311458 0.656482
1 2 3 4
Weights 0.034526 -0.004364 0.361496 0.536057
1 2 3 4
Weights -0.080265 -0.018223 0.247781 0.74828
1 2 3 4
Weights 0.042772 0.066479 0.249441 0.583874
1 2 3 4
Weights 0.017642 0.121519 0.338542 0.413955
1 2 3 4
Weights -0.118952 0.004571 0.30575 0.727382
1 2 3 4
Weights -0.070599 0.024016 0.240031 0.694749
1 2 3 4
Weights -0.144538 0.010519 0.273951 0.769943
1 2 3 4
Weights -0.208162 -0.033623 0.344006 0.809296
1 2 3 4
Weights -0.186725 -0.031015 0.274562 0.847297
1 2 3 4
Weights -0.230601 -0.087063 0.453658 0.764597
1 2 3 4
Weights -0.063967 -0.017967 0.253571 0.705947
1 2 3 4
Weights -0.171025 -0.074456 0.162063 1.011314
1 2 3 4
Weights 0.03348 0.004785 0.217136 0.644447
1 2 3 4
Weights -0.126152 -0.03858 0.340357 0.737439
1 2 3 4
Weights -0.055131 -0.061155 0.288083 0.783386
1 2 3 4
Weights -0.17896 -0.060321 0.444982 0.689485
1 2 3 4
Weights 0.017322 0.233299 -0.482099 1.0497
1 2 3 4
Weights -0.146121 0.025878 0.430259 0.562335
1 2 3 4
Weights 0.167569 -0.178639 0.500058 0.447829
1 2 3 4
Weights -0.177532 -0.037077 0.338651 0.756292
1 2 3 4
Weights 0.009765 0.062895 0.289196 0.543815
1 2 3 4
Weights 0.131413 0.014758 -0.017518 0.4536
1 2 3 4
Weights 0.01678 -0.098294 0.318857 0.655304
1 2 3 4
Weights -0.10864 -0.221857 0.164873 1.118532
1 2 3 4
Weights -0.066749 0.026039 0.265614 0.684496
1 2 3 4
Weights -0.050241 -0.118996 0.398607 0.684868
1 2 3 4
Weights 0.049827 0.033433 0.136562 0.684619
1 2 3 4
Weights -0.184178 0.001392 0.323188 0.771285
1 2 3 4
Weights -0.029428 0.016325 0.207542 0.745107
1 2 3 4
Weights -0.067862 0.062636 0.16706 0.726673
1 2 3 4
Weights -0.046283 0.06591 0.206534 0.663517
1 2 3 4
Weights 0.00742 0.054824 0.197227 0.691347
1 2 3 4
Weights -0.025602 -0.094326 0.342466 0.697986
1 2 3 4
Weights -0.101536 0.032612 0.297162 0.66986
1 2 3 4
Weights -0.156677 -0.004 0.347122 0.689922
1 2 3 4
Weights 0.098101 0.084755 0.063943 0.52841
1 2 3 4
Weights -0.153657 -0.053952 0.276009 0.844314
1 2 3 4
Weights -0.112273 -0.004811 0.36563 0.667836
1 2 3 4
Weights -0.059322 -0.146441 0.338474 0.818822
1 2 3 4
Weights -0.109553 -0.094701 0.172907 0.933739
1 2 3 4
Weights -0.048359 -0.058952 0.396922 0.638556
1 2 3 4
Weights -0.014973 -0.163251 0.398017 0.726214
1 2 3 4
Weights -0.168255 -0.014452 0.351568 0.708619
1 2 3 4
Weights 0.046919 0.095794 0.226101 0.433765
1 2 3 4
Weights 0.097505 -0.056296 0.257258 0.633515
1 2 3 4
Weights -0.136972 0.038756 0.463735 0.513926
1 2 3 4
Weights -0.112959 -0.053637 0.313398 0.759777
1 2 3 4
Weights 0.352551 0.110278 0.065152 0.400091
1 2 3 4
Weights -0.015293 -0.318979 0.495257 0.790883
1 2 3 4
Weights -0.04541 -0.081627 0.361356 0.69198
1 2 3 4
Weights -0.134886 -0.112043 0.322206 0.860461
1 2 3 4
Weights 0.042781 -0.020693 0.197421 0.69008
1 2 3 4
Weights 0.03872 -0.138565 0.358928 0.652774
1 2 3 4
Weights 0.116964 0.135352 0.137577 0.450523
1 2 3 4
Weights 0.066153 0.044411 0.224366 0.574778
1 2 3 4
Weights -0.113432 -0.092841 0.401302 0.739097
1 2 3 4
Weights -0.181637 -0.057896 0.308046 0.851821
1 2 3 4
Weights -0.070598 -0.189338 0.400637 0.771367
1 2 3 4
Weights -0.074021 0.02702 0.248369 0.711709
1 2 3 4
Weights -0.125641 0.004056 0.255887 0.768467
1 2 3 4
Weights -0.056027 -0.171023 0.445345 0.717427
1 2 3 4
Weights 0.061299 0.087747 0.095285 0.566967
1 2 3 4
Weights 0.175714 -0.010507 0.256409 0.512148
1 2 3 4
Weights -0.07689 0.003517 0.147879 0.813817
1 2 3 4
Weights -0.006247 -0.217338 0.37197 0.799256
1 2 3 4
Weights -0.142621 0.036745 0.253994 0.763749
1 2 3 4
Weights -0.185144 0.044118 0.38992 0.669736
1 2 3 4
Weights -0.00568 0.024929 0.275301 0.650002
1 2 3 4
Weights -0.151996 -0.07567 0.36204 0.750971
1 2 3 4
Weights -0.001826 0.046604 0.277581 0.614327
1 2 3 4
Weights 0.007121 -0.156173 0.225985 0.772841
1 2 3 4
Weights 0.023434 0.009742 0.189407 0.663888
1 2 3 4
Weights -0.183734 -0.008558 0.453393 0.658555
1 2 3 4
Weights 0.094547 -0.102354 0.421323 0.506197
1 2 3 4
Weights -0.019632 0.096522 0.199619 0.646599
1 2 3 4
Weights -0.152482 -0.127711 0.325126 0.862974
1 2 3 4
Weights -0.111317 0.014438 0.214141 0.787043
1 2 3 4
Weights -0.041298 -0.115159 0.252287 0.82968
1 2 3 4
Weights -0.076613 0.000842 0.336061 0.62634
1 2 3 4
Weights -0.087171 0.04346 0.373342 0.576088
1 2 3 4
Weights 0.20186 -0.001082 0.156539 0.572299
1 2 3 4
Weights -0.052946 -0.071253 0.242287 0.768434
1 2 3 4
Weights -0.003712 -0.023749 0.354397 0.594607
1 2 3 4
Weights 0.049447 0.058464 0.173254 0.623336
1 2 3 4
Weights -0.061737 -0.09824 0.254984 0.818112
1 2 3 4
Weights -0.044583 -0.140443 0.361861 0.772865
1 2 3 4
Weights -0.111793 0.012124 0.380587 0.631838
1 2 3 4
Weights -0.064076 -0.139109 0.400089 0.704795
1 2 3 4
Weights -0.170851 -0.024113 0.273409 0.816172
1 2 3 4
Weights 0.002585 0.033135 0.228791 0.645988
1 2 3 4
Weights 0.109373 0.089104 0.113537 0.592631
1 2 3 4
Weights -0.055323 0.049956 0.297245 0.617811
1 2 3 4
Weights -0.150545 0.014271 0.376916 0.667206
1 2 3 4
Weights 0.105559 0.098751 0.197886 0.500678
1 2 3 4
Weights -0.105709 -0.166715 0.268138 0.91009
1 2 3 4
Weights 0.018426 0.073049 0.166672 0.669638
1 2 3 4
Weights 0.056508 -0.133286 0.357766 0.66337
1 2 3 4
Weights -0.150546 -0.062469 0.189249 0.915514
1 2 3 4
Weights 0.031289 0.066664 0.276832 0.482444
1 2 3 4
Weights -0.044703 -0.073223 0.378138 0.669938
1 2 3 4
Weights 0.061381 0.083409 0.095808 0.588907
1 2 3 4
Weights -0.070719 -0.204416 0.341202 0.85395
1 2 3 4
Weights -0.119952 -0.028597 0.2173 0.8179
1 2 3 4
Weights -0.039813 -0.002958 0.315426 0.624105
1 2 3 4
Weights -0.053681 -0.096203 0.429209 0.656862
1 2 3 4
Weights -0.125434 -0.079634 0.237113 0.874122
1 2 3 4
Weights -0.112289 0.063559 0.286518 0.693465
1 2 3 4
Weights -0.086079 -0.101565 0.15655 0.968775
1 2 3 4
Weights -0.133643 -0.145532 0.338249 0.844701
1 2 3 4
Weights -0.124586 -0.123363 0.175634 0.980445
1 2 3 4
Weights -0.062992 0.110964 0.180232 0.671907
1 2 3 4
Weights -0.095665 0.022676 0.318135 0.648432
1 2 3 4
Weights -0.10493 -0.070022 0.392863 0.674295
1 2 3 4
Weights -0.160011 -0.043404 0.264103 0.845823
1 2 3 4
Weights -0.097158 0.057196 0.174167 0.780712
1 2 3 4
Weights -0.058491 0.041906 0.31307 0.680826
1 2 3 4
Weights 0.099661 0.021931 0.222917 0.616257
1 2 3 4
Weights -0.012176 0.096964 0.236719 0.538271
1 2 3 4
Weights 0.075484 -0.060216 0.196788 0.6686
1 2 3 4
Weights -0.1067 -0.042557 0.248283 0.799475
1 2 3 4
Weights -0.215974 -0.021202 0.344308 0.8114
1 2 3 4
Weights -0.096942 -0.253794 0.463976 0.833064
1 2 3 4
Weights 0.018116 0.169106 0.247754 0.354267
1 2 3 4
Weights 0.01466 -0.04532 0.286857 0.649215
1 2 3 4
Weights -0.18857 -0.057004 0.353448 0.827163
1 2 3 4
Weights -0.023456 -0.017528 0.14072 0.837238
1 2 3 4
Weights -0.034142 -0.211253 0.452698 0.738741
1 2 3 4
Weights -0.053769 0.035407 0.416388 0.481203
1 2 3 4
Weights -0.007887 0.121737 0.16108 0.618585
1 2 3 4
Weights 0.097325 0.123376 0.179632 0.533348
1 2 3 4
Weights -0.04582 -0.011302 0.277484 0.691321
1 2 3 4
Weights 0.094 -0.001849 0.296214 0.536416
1 2 3 4
Weights -0.06219 0.070758 0.199523 0.693788
1 2 3 4
Weights 0.065067 0.061018 0.080133 0.656692
1 2 3 4
Weights 0.06304 -0.031773 0.273995 0.611593
1 2 3 4
Weights -0.141156 -0.127444 0.351497 0.816942
1 2 3 4
Weights -0.141454 -0.14309 0.334545 0.844515
1 2 3 4
Weights 0.104398 0.063477 0.247888 0.510529
1 2 3 4
Weights -0.180903 -0.11672 0.39546 0.829254
Predicting...
t+1 t+2 t+3 t+4
R2 8.546575e-01 -4.768467e+08 -4.403893e+04 -7.969461e-03
RMSE 5.557481e+01 3.183252e+06 3.059179e+04 1.463543e+02
MSE 3.088559e+03 1.013309e+13 9.358579e+08 2.141957e+04
MAE -1.662754e+10 -1.277954e+13 -2.433361e+11 -9.831762e+07
MAPE -1.662754e+12 -1.277954e+15 -2.433361e+13 -9.831762e+09
MPE 1.293655e+12 -1.265700e+15 -1.973984e+13 6.247137e+09
CPU times: user 48 s, sys: 1.8 s, total: 49.8 s
Wall time: 40.6 s
In [66]:
plot_diff_along_time(X_test, arb_preds_s)
In [67]:
plot_bispecific(X_test, baseline_preds, arb_preds, arb_preds_s, order, limit_t, j, s)
In [68]:
plot_qualitative_analysis(arb_preds, X_test, limit_t, order, subway_stations, del_hours)
In [69]:
plot_qualitative_analysis(arb_preds_s, X_test, limit_t, order, subway_stations, del_hours)
In [70]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 'None'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(arb_scores).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR + baseline")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="General baseline")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of full baseline and AR model, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);
In [71]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 's'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(arb_scores_s).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR per station + baseline")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="Baseline per station")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.ylim((0, 1000))
plt.legend(prop={'size': 20})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

Compute and Compare

In [72]:
fig, ax = plt.subplots(1, figsize=(16, 6))
ax.set_prop_cycle(color=['crimson', 'teal', 'b', 'darkgoldenrod'])

x = range(1, limit_t+1)
baseline_scores = df_baseline_scores.loc['RMSE'].values.repeat(limit_t).reshape(-1, limit_t).T
model_scores = np.vstack((np.array(arb_scores).T[1], np.array(arb_scores_s).T[1])).T

baselineObjects = plt.plot(x, baseline_scores, linewidth=3)
labels = ["General baseline", "Baseline per station", "AR + baseline", "AR per station + baseline"]

arlineObjects = plt.plot(x, model_scores, linewidth=3)

for i, m in zip(range(4), ['D', '*']):
    ax = plt.scatter(x, baseline_scores[:, i], marker=m, s=100)

for i, m in zip(range(2), ['D', '*']):
    ax = plt.scatter(x, model_scores[:, i], marker=m, s=100)

plt.ylim((0, model_scores[:, 0].max()))
plt.legend(baselineObjects+arlineObjects, labels, prop={'size': 15})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

AR per t

In [73]:
from cost_functions import mse, mse_g
from sklearn.linear_model import LinearRegression, Lasso

class myARt(Regressor):
    def __init__(self, order=4, tplus=1, level=None, loss=mse, loss_g=mse_g, max_iter=1000,
                 eps=0.01):
        """ Initialisation des paramètres du perceptron

        :param order: Taille de la fenêtre glissante
        :param loss: fonction de coût
        :param loss_g: gradient de la fonction coût
        :param max_iter: nombre maximum d'itération de la fonction coût
        :param eps: pas du gradient


        """

        self.order = order
        self.level = level
        self.tplus = tplus              
    
    @Regressor.datax_t_decorator
    def fit(self, datax, tplus):
        """ Finds the optimal weigths analytically 
        
        :param datax: contient tous les exemples du dataset
        :returns: void
        :rtype: None
        
        """
        
        self.reg = LinearRegression()
        _, self.X, self.y = datax
       
        self.reg.fit(self.X, self.y)
        self.w = self.reg.coef_.squeeze()
        display(HTML(pd.DataFrame(self.w.reshape(1, -1), index=['Weights'], 
                                  columns=range(1, len(self.w)+1)).to_html()))
        return self

    
    def reshaped(self, y_pred, datax):
        """
        """
        
        if datax.ndim == 3:
            return y_pred.reshape((datax.shape[0] * datax.shape[1], 
                                datax.shape[2] - self.order - self.tplus + 1), 
                                order='F').reshape((datax.shape[0],
                                                   datax.shape[1],
                                                   datax.shape[2] - self.order - self.tplus + 1))
        elif datax.ndim == 2:
            return y_pred.reshape((datax.shape[0], datax.shape[1] - self.order - self.tplus + 1),
                                order='F')
    
    @Regressor.datax_t_decorator
    def forecast(self, datax, tplus):
        """
        
        """
        
        datax, self.X_test, self.y_test = datax
        
        return self.reshaped(self.reg.predict(self.X_test), datax)
In [74]:
class theARt(Baseline):
    station_id = 0
    def __init__(self, order, tplus,  level=None, first_ndays=7):
        """
        
        """
        
        super().__init__(level, first_ndays)
        self.level = level
        self.order = order
        self.tplus = tplus
        
    def fit(self, datax):
        """
        
        """
        
        if self.level is None:
            self.model = myARt(order=self.order, tplus=self.tplus, level=self.level)
            self.model.fit(datax, self.tplus)
            
        elif self.level.lower() == "s":
            
            self.models = []            
            
            datax.apply(lambda station: self.models.append(
                myARt(order=self.order, tplus=self.tplus, level=self.level).fit(station.T, self.tplus)), 
                        axis=(0, 2))
        
        elif self.level.lower() == "j":
            # TODO
            self.mean = []
            for d in range(self.first_ndays):
                exist_ind = list(set(ddict_days[d].values()) & set(datax.items))
                self.mean.append(datax[exist_ind].mean().mean(axis=1))      
                
        elif self.level.lower() == "sj":
            # TODO
            self.mean = []
            for d in range(self.first_ndays):
                exist_ind = list(set(ddict_days[d].values()) & set(datax.items))
                self.mean.append(datax[exist_ind].mean(axis=0))
        else:
            raise ValueError("Unknown value for level attribute, \
            try: s, j, sj or None")
    
    
    def predict(self, datax, tplus=None):
        """
        
        """
        
        def predict_for_station(x, tplus):
            """
            """
            
            station_pred = self.models[self.station_id].forecast(x, tplus)
            self.station_id += 1
            
            return station_pred
        
        if self.level is None:
            
            X_pred = self.model.forecast(datax, tplus)
            return panelIt(X_pred, datax, self.model.order + tplus - 1, subway_stations, del_hours)
            
        elif self.level.lower() == "s":
            
            X_pred = datax.apply(lambda x: predict_for_station(x.T, tplus), 
                                 axis=(0, 2)).transpose(1, 0, 2)
            self.station_id = 0
            
            return panelIt(X_pred.values, datax, self.models[0].order + tplus - 1, subway_stations, del_hours)
        
        elif self.level.lower() == "j":
            # TODO
            pass
        elif self.level.lower() == "sj":
            # TODO
            pass
        else:
            raise ValueError("Unknown value for level attribute, \
            try: s, j, sj or None")
    
    
    def score(self, datax, X_pred, level):
        """
        """
        
        if level == None:
            self.scores = super().metrics_score(
                datax.iloc[:, :, self.model.order + self.tplus - 1:], X_pred.values)
        elif level == 's':
            self.scores = super().metrics_score(
                datax.iloc[:, :, self.models[0].order + self.tplus - 1:], X_pred.values)
            
        return self.scores
    
In [75]:
def art_plot_results(level, order, limit_t, X_train=Xn_train, X_test=Xn_test, 
                    X_min_max=X_min_max, a=a, b=b):
    """
    
    """
    
    ar_scores = []
    ar_preds = []
    
    Xdn_test = pd.Panel(np.array(list(map(lambda station_id: 
                                          denormalize(X_test, 
                                                      X_min_max, 
                                                      station_id, a=a, b=b), 
                                     X_test.transpose(1, 0, 2)))).transpose(2, 0, 1),
                   items=list(X_test.items),
                   major_axis=subway_stations,
                   minor_axis=generate_times("15min")[(del_hours * 4):])
    
    print("Predicting...")

    for t in range(1, limit_t+1):
        
        ar = theARt(level=level, order=order, tplus=t)

        print("Fitting t+{}...".format(t))
        ar.fit(X_train)
        
        print("Predicting t+{}...".format(t))
        X_pred = ar.predict(X_test, t)
        
        Xdn_pred = pd.Panel(np.array(list(map(lambda station_id: 
                                              denormalize(X_pred, 
                                                         X_min_max, 
                                                         station_id, a=a, b=b),
                                              X_pred.transpose(1, 0, 2)))).transpose(2, 0, 1),
                                     items=list(X_pred.items),
                                     major_axis=list(X_pred.major_axis),
                                     minor_axis=list(X_pred.minor_axis))
        
        ar_preds.append(Xdn_pred)
        
        ar_scores.append(ar.score(Xdn_test, Xdn_pred, level))
    
    display(HTML((pd.DataFrame(np.array(ar_scores).T, 
                               index=['R2', 'RMSE', 'MSE', 'MAE', 'MAPE', 'MPE'], 
                               columns=list(map(
                                   lambda x: "t+"+str(x),
                                   range(1, len(ar_scores)+1))))).to_html()))
    
    
    return ar_preds, ar_scores

def plot_diff_along_time_per_t(X_test, ar_preds):
    
    for t in range(len(ar_preds)):
        res = []
        order = X_test.shape[2] - ar_preds[t].shape[2]    
        
        res.append(np.sqrt(((X_test.iloc[:, :, order:].values - 
                             ar_preds[t].values)**2).mean(axis=(0, 1))))

        pd_res = pd.DataFrame(np.array(res).T, index=list(ar_preds[t].minor_axis),
                              columns=['t+{}'.format(t)])
        pd_res.plot(figsize=(16, 2), 
                    title='Plot of RMSE between predicted and real values along days')
In [76]:
order, limit_t = 4, 4
In [77]:
%%time
art_preds, art_scores = art_plot_results(None, order=order, limit_t=limit_t)
Predicting...
Fitting t+1...
1 2 3 4
Weights -0.07191 -0.14902 0.289492 0.849837
Predicting t+1...
Fitting t+2...
1 2 3 4
Weights -0.301045 0.010542 0.161812 0.959116
Predicting t+2...
Fitting t+3...
1 2 3 4
Weights -0.263309 -0.276912 0.343574 0.93255
Predicting t+3...
Fitting t+4...
1 2 3 4
Weights -0.258649 -0.240417 0.048574 1.092446
Predicting t+4...
t+1 t+2 t+3 t+4
R2 0.957309 0.917455 0.856489 0.791831
RMSE 75.311650 105.280642 139.593979 169.079444
MSE 5671.844689 11084.013506 19486.478883 28587.858256
MAE 0.363100 0.521453 0.667529 0.805648
MAPE 36.310028 52.145262 66.752910 80.564812
MPE -21.190840 -34.007218 -45.795876 -57.215237
CPU times: user 6.72 s, sys: 4.38 s, total: 11.1 s
Wall time: 10.6 s
In [78]:
plot_diff_along_time_per_t(X_test, art_preds)
In [79]:
%%time
art_preds_s, art_scores_s = art_plot_results("s", order, limit_t)
Predicting...
Fitting t+1...
1 2 3 4
Weights -0.255585 -0.01136 0.313564 0.818924
1 2 3 4
Weights -0.055682 0.053627 0.145081 0.731377
1 2 3 4
Weights -0.086472 -0.290653 0.199221 1.072153
1 2 3 4
Weights -0.074532 -0.256497 0.560001 0.652402
1 2 3 4
Weights 0.04503 -0.041891 0.158835 0.788967
1 2 3 4
Weights -0.071104 -0.204754 0.120356 1.038711
1 2 3 4
Weights -0.08688 -0.142759 0.415225 0.736634
1 2 3 4
Weights -0.185678 -0.066327 0.101592 1.049717
1 2 3 4
Weights -0.129815 -0.097025 0.078189 1.056087
1 2 3 4
Weights -0.021637 -0.243946 -0.082888 1.25544
1 2 3 4
Weights -0.158904 -0.222866 0.646848 0.591004
1 2 3 4
Weights -0.041966 -0.098744 0.246228 0.797511
1 2 3 4
Weights 0.302188 -0.254555 0.42137 0.454084
1 2 3 4
Weights -0.033364 -0.239709 0.502055 0.69789
1 2 3 4
Weights 0.015932 -0.166423 0.240362 0.848702
1 2 3 4
Weights -0.061307 -0.197824 0.167681 0.994309
1 2 3 4
Weights -0.080071 -0.155193 0.237343 0.907938
1 2 3 4
Weights -0.054032 0.094842 0.238382 0.609204
1 2 3 4
Weights -0.136526 -0.028739 0.178496 0.911827
1 2 3 4
Weights 0.036465 -0.410082 0.254123 1.017688
1 2 3 4
Weights -0.138808 -0.140558 0.222161 0.9519
1 2 3 4
Weights -0.101423 -0.189763 0.264213 0.914141
1 2 3 4
Weights -0.069466 -0.255923 0.13 1.076936
1 2 3 4
Weights -0.185295 -0.016679 -0.074843 1.191167
1 2 3 4
Weights 0.009591 -0.300259 0.464256 0.756829
1 2 3 4
Weights -0.004418 -0.162062 0.325373 0.773807
1 2 3 4
Weights -0.253484 0.068496 0.305645 0.809106
1 2 3 4
Weights 0.051855 -0.234672 0.396711 0.712681
1 2 3 4
Weights -0.06007 -0.049043 0.263158 0.724431
1 2 3 4
Weights 0.121114 -0.398591 0.344617 0.847636
1 2 3 4
Weights -0.085934 -0.380161 0.471132 0.929467
1 2 3 4
Weights -0.022663 -0.364238 0.241989 1.040159
1 2 3 4
Weights -0.100184 -0.107912 0.18664 0.925849
1 2 3 4
Weights -0.05212 -0.216472 0.344732 0.843072
1 2 3 4
Weights 0.170159 0.113226 -0.024498 0.634672
1 2 3 4
Weights -0.030164 -0.123085 0.301298 0.791196
1 2 3 4
Weights -0.051505 0.010284 0.142652 0.797204
1 2 3 4
Weights -0.095866 -0.111411 0.232374 0.843406
1 2 3 4
Weights 0.039805 -0.386263 0.324459 0.959719
1 2 3 4
Weights -0.035947 -0.371006 0.345183 0.990275
1 2 3 4
Weights -0.000588 0.001096 0.316104 0.615533
1 2 3 4
Weights 0.117621 0.113051 0.004942 0.647582
1 2 3 4
Weights -0.139885 0.094827 0.219763 0.740501
1 2 3 4
Weights -0.150142 -0.012396 0.204563 0.840102
1 2 3 4
Weights -0.131747 -0.110322 0.28954 0.867265
1 2 3 4
Weights 0.055366 -0.559806 0.343571 1.066679
1 2 3 4
Weights -0.04913 -0.06709 0.20988 0.805025
1 2 3 4
Weights -0.064604 -0.19958 0.154244 1.043617
1 2 3 4
Weights -0.172486 -0.207997 0.384411 0.899286
1 2 3 4
Weights 0.280663 0.076554 0.075619 0.503552
1 2 3 4
Weights -0.023608 -0.217333 0.123052 1.039497
1 2 3 4
Weights -0.060199 -0.109318 0.389158 0.671495
1 2 3 4
Weights -0.103878 -0.049245 0.226935 0.853896
1 2 3 4
Weights 0.064955 -0.32729 0.273072 0.924667
1 2 3 4
Weights -0.217135 0.108121 0.38163 0.611587
1 2 3 4
Weights 0.046668 0.254702 -0.008694 0.567613
1 2 3 4
Weights -0.144928 -0.148965 0.15578 1.064264
1 2 3 4
Weights 0.102255 -0.094667 0.168106 0.722174
1 2 3 4
Weights 0.007507 -0.309365 0.068561 1.122165
1 2 3 4
Weights -0.056488 -0.093323 0.301315 0.759553
1 2 3 4
Weights -0.088228 -0.108627 0.095778 1.018565
1 2 3 4
Weights 0.118567 0.115323 0.059806 0.608699
1 2 3 4
Weights 0.202978 -0.116824 0.268232 0.583766
1 2 3 4
Weights -0.207456 0.030362 0.349792 0.726016
1 2 3 4
Weights 0.107526 0.034438 -0.100424 0.865525
1 2 3 4
Weights -0.06008 -0.23499 0.064896 1.127952
1 2 3 4
Weights -0.214086 -0.064483 0.353301 0.786902
1 2 3 4
Weights -0.11463 -0.124054 -0.082009 1.240807
1 2 3 4
Weights 0.051376 0.014064 0.042212 0.812644
1 2 3 4
Weights -0.014391 -0.274008 0.012119 1.158152
1 2 3 4
Weights -0.148375 0.06181 0.358565 0.615264
1 2 3 4
Weights -0.194388 -0.182617 0.506295 0.773552
1 2 3 4
Weights -0.099093 -0.024259 -0.100024 1.13517
1 2 3 4
Weights -0.140298 -0.083588 0.348142 0.721826
1 2 3 4
Weights 0.000159 0.000057 0.321995 0.602528
1 2 3 4
Weights 0.313583 0.032354 0.036196 0.393361
1 2 3 4
Weights -0.033068 -0.340202 0.600944 0.706937
1 2 3 4
Weights -0.231421 -0.092669 0.371942 0.87992
1 2 3 4
Weights -0.170143 -0.145339 0.233367 0.976749
1 2 3 4
Weights -0.010278 -0.010987 0.273435 0.612782
1 2 3 4
Weights -0.156512 -0.09238 0.328982 0.820704
1 2 3 4
Weights 0.004969 -0.458166 0.406672 0.938217
1 2 3 4
Weights -0.08685 -0.041725 -0.073327 1.100163
1 2 3 4
Weights 0.082665 -0.067993 -0.019753 0.882192
1 2 3 4
Weights -0.173816 -0.125638 0.145103 1.053924
1 2 3 4
Weights -0.034862 -0.347491 0.267008 0.990461
1 2 3 4
Weights -0.202948 -0.05138 0.256593 0.906465
1 2 3 4
Weights -0.111722 -0.078554 0.276436 0.806268
1 2 3 4
Weights -0.127425 -0.2547 0.324727 0.949023
1 2 3 4
Weights -0.036441 -0.147237 0.376229 0.700271
1 2 3 4
Weights 0.001857 -0.369751 0.133213 1.143233
1 2 3 4
Weights 0.010257 -0.499282 0.366416 1.01717
1 2 3 4
Weights -0.174195 -0.109095 0.2964 0.859678
1 2 3 4
Weights -0.05009 -0.178747 0.314264 0.865257
1 2 3 4
Weights -0.068862 -0.284481 0.388201 0.842867
1 2 3 4
Weights -0.047376 -0.222041 0.304954 0.869161
1 2 3 4
Weights -0.087775 -0.256047 0.165117 1.068152
1 2 3 4
Weights -0.146824 -0.275863 0.269877 1.048894
1 2 3 4
Weights -0.060379 -0.281576 0.46896 0.789163
1 2 3 4
Weights -0.027073 -0.477138 0.552234 0.857717
1 2 3 4
Weights -0.031205 -0.31744 0.479283 0.786598
1 2 3 4
Weights 0.019677 -0.192076 0.465378 0.644047
1 2 3 4
Weights -0.175035 -0.047698 0.165238 0.949378
1 2 3 4
Weights -0.013304 -0.260061 0.275381 0.953007
1 2 3 4
Weights -0.060145 -0.017365 0.121062 0.834593
1 2 3 4
Weights -0.11589 -0.02772 0.070577 0.980401
1 2 3 4
Weights -0.137855 0.019047 0.257351 0.773571
1 2 3 4
Weights -0.14242 -0.224934 0.231638 1.035725
1 2 3 4
Weights -0.096865 -0.089163 0.420466 0.653252
1 2 3 4
Weights -0.216429 -0.066451 0.32581 0.825182
1 2 3 4
Weights -0.127089 0.076313 0.103513 0.835885
1 2 3 4
Weights -0.011454 -0.396302 0.542473 0.810253
1 2 3 4
Weights -0.02051 -0.120254 0.443025 0.604001
1 2 3 4
Weights -0.0634 -0.119702 0.168568 0.880941
1 2 3 4
Weights -0.207299 0.008564 0.188248 0.930583
1 2 3 4
Weights -0.08164 -0.116078 0.343197 0.786344
1 2 3 4
Weights -0.075242 -0.131763 0.221482 0.890717
1 2 3 4
Weights -0.068986 -0.028425 0.320641 0.627344
1 2 3 4
Weights -0.060834 -0.215924 0.224918 0.916893
1 2 3 4
Weights 0.091637 -0.310991 0.271896 0.852402
1 2 3 4
Weights -0.210401 0.005046 0.215832 0.866152
1 2 3 4
Weights -0.017376 -0.19665 0.259883 0.842372
1 2 3 4
Weights -0.054596 -0.365346 0.436941 0.896725
1 2 3 4
Weights 0.080526 0.15308 0.030096 0.690539
1 2 3 4
Weights -0.114153 -0.132032 0.283141 0.876998
1 2 3 4
Weights -0.054835 -0.059695 0.227531 0.839687
1 2 3 4
Weights -0.07124 -0.243637 0.520031 0.724902
1 2 3 4
Weights -0.054432 -0.368236 0.621986 0.736435
1 2 3 4
Weights -0.018126 -0.327098 0.336516 0.909628
1 2 3 4
Weights 0.193591 0.182285 0.049013 0.493012
1 2 3 4
Weights -0.144758 -0.147211 0.23333 0.955593
1 2 3 4
Weights -0.067341 -0.261541 0.271173 0.983236
1 2 3 4
Weights -0.16143 0.020023 0.100289 0.952162
1 2 3 4
Weights 0.097577 -0.406603 0.420521 0.816355
1 2 3 4
Weights -0.129531 -0.047099 0.209134 0.929257
1 2 3 4
Weights -0.103135 -0.349689 0.353397 0.982907
1 2 3 4
Weights -0.024491 -0.172015 0.233097 0.917904
1 2 3 4
Weights 0.024492 -0.06428 0.278425 0.669044
1 2 3 4
Weights 0.015033 -0.168414 0.214371 0.868737
1 2 3 4
Weights -0.118449 -0.236705 0.163722 1.085047
1 2 3 4
Weights -0.13618 -0.043511 0.238105 0.868153
1 2 3 4
Weights -0.123087 -0.172625 0.298519 0.880914
1 2 3 4
Weights -0.135197 -0.166029 0.241105 0.954836
1 2 3 4
Weights 0.005223 -0.168892 0.367337 0.683269
1 2 3 4
Weights -0.093757 -0.061781 0.219476 0.841735
1 2 3 4
Weights -0.0473 -0.257459 0.333606 0.907321
1 2 3 4
Weights -0.18871 0.051217 -0.058647 1.107192
1 2 3 4
Weights -0.012666 -0.372996 0.494787 0.807934
1 2 3 4
Weights -0.198087 -0.100607 0.285524 0.922427
1 2 3 4
Weights -0.037168 -0.240259 0.19016 0.998595
1 2 3 4
Weights 0.10371 -0.262809 0.371145 0.678856
1 2 3 4
Weights -0.15825 -0.076183 0.298341 0.855888
1 2 3 4
Weights -0.17039 -0.017288 0.305664 0.785368
1 2 3 4
Weights -0.032295 -0.09653 0.340541 0.677644
1 2 3 4
Weights -0.012808 -0.052476 0.28308 0.717141
1 2 3 4
Weights -0.128861 -0.215211 0.118351 1.113698
1 2 3 4
Weights -0.098565 -0.06401 0.12425 0.919392
1 2 3 4
Weights -0.14249 0.102455 0.066075 0.912533
1 2 3 4
Weights 0.065655 -0.285342 0.304309 0.865765
1 2 3 4
Weights -0.073317 -0.280684 0.32631 0.940181
1 2 3 4
Weights -0.142418 0.038219 0.085639 0.943043
1 2 3 4
Weights -0.207308 -0.041345 0.335196 0.814874
1 2 3 4
Weights -0.028374 -0.226345 0.491865 0.692961
1 2 3 4
Weights -0.080706 -0.158375 0.200527 0.960518
1 2 3 4
Weights -0.016394 -0.124353 0.319456 0.769229
1 2 3 4
Weights -0.133893 0.020416 0.360506 0.653255
1 2 3 4
Weights -0.140065 -0.140432 0.265255 0.950562
1 2 3 4
Weights -0.104139 -0.107252 0.229987 0.880197
1 2 3 4
Weights -0.195781 -0.069816 0.216669 0.94942
1 2 3 4
Weights -0.214544 -0.128757 0.256784 0.997251
1 2 3 4
Weights -0.16037 -0.11492 0.153685 1.023577
1 2 3 4
Weights -0.221777 -0.163819 0.326647 0.960096
1 2 3 4
Weights -0.089778 -0.135533 0.201065 0.905062
1 2 3 4
Weights -0.125584 -0.181293 0.013866 1.213356
1 2 3 4
Weights -0.019365 -0.11979 0.245406 0.80118
1 2 3 4
Weights -0.163535 -0.143294 0.282095 0.937828
1 2 3 4
Weights -0.0407 -0.240993 0.326819 0.900722
1 2 3 4
Weights -0.202487 -0.193891 0.381398 0.901241
1 2 3 4
Weights -0.062713 0.278859 -0.538007 1.160583
1 2 3 4
Weights -0.208089 -0.135435 0.45353 0.748619
1 2 3 4
Weights 0.099728 -0.37421 0.63946 0.561694
1 2 3 4
Weights -0.147911 -0.223315 0.328781 0.924289
1 2 3 4
Weights -0.064857 -0.057518 0.321597 0.737255
1 2 3 4
Weights 0.16209 0.031063 -0.004958 0.53419
1 2 3 4
Weights -0.007037 -0.284018 0.384895 0.79564
1 2 3 4
Weights -0.048177 -0.322875 0.104892 1.212352
1 2 3 4
Weights -0.107267 -0.119918 0.258366 0.878488
1 2 3 4
Weights -0.037997 -0.35202 0.466327 0.823829
1 2 3 4
Weights -0.030031 -0.004559 0.05903 0.874914
1 2 3 4
Weights -0.228435 -0.063043 0.233921 0.967644
1 2 3 4
Weights -0.117999 -0.124718 0.246435 0.950663
1 2 3 4
Weights -0.093605 -0.006906 0.102649 0.876906
1 2 3 4
Weights -0.129545 -0.023044 0.18386 0.844481
1 2 3 4
Weights -0.092807 -0.131776 0.282139 0.89545
1 2 3 4
Weights -0.009498 -0.311705 0.412706 0.824386
1 2 3 4
Weights -0.128412 -0.118041 0.323202 0.825309
1 2 3 4
Weights -0.161916 -0.163053 0.330386 0.87085
1 2 3 4
Weights 0.010511 0.052463 0.053912 0.704616
1 2 3 4
Weights -0.126994 -0.19504 0.219551 1.019476
1 2 3 4
Weights -0.115298 -0.20497 0.425823 0.816829
1 2 3 4
Weights 0.002651 -0.348249 0.358977 0.939376
1 2 3 4
Weights -0.108273 -0.129958 0.033109 1.106449
1 2 3 4
Weights -0.044944 -0.26287 0.476066 0.758691
1 2 3 4
Weights -0.013013 -0.317016 0.426181 0.842591
1 2 3 4
Weights -0.159968 -0.172638 0.334498 0.881469
1 2 3 4
Weights -0.037341 0.013818 0.223824 0.635338
1 2 3 4
Weights 0.100711 -0.191716 0.329805 0.692442
1 2 3 4
Weights -0.208243 -0.148594 0.549474 0.67443
1 2 3 4
Weights -0.141898 -0.110924 0.232945 0.927262
1 2 3 4
Weights 0.381675 0.062528 0.063654 0.421444
1 2 3 4
Weights 0.029428 -0.45095 0.512332 0.853102
1 2 3 4
Weights -0.032708 -0.262768 0.405787 0.806008
1 2 3 4
Weights -0.083386 -0.227836 0.200279 1.03655
1 2 3 4
Weights 0.015592 -0.129297 0.162262 0.860778
1 2 3 4
Weights 0.025628 -0.33195 0.415192 0.805635
1 2 3 4
Weights 0.053593 0.12952 0.066834 0.606904
1 2 3 4
Weights -0.035522 -0.112468 0.319545 0.772909
1 2 3 4
Weights -0.090998 -0.277814 0.439076 0.858399
1 2 3 4
Weights -0.178236 -0.155317 0.165772 1.076691
1 2 3 4
Weights -0.03619 -0.406361 0.481499 0.863265
1 2 3 4
Weights -0.133475 -0.06212 0.189467 0.9403
1 2 3 4
Weights -0.172166 -0.05671 0.170552 0.962851
1 2 3 4
Weights -0.00556 -0.395592 0.505087 0.830839
1 2 3 4
Weights -0.000324 0.01582 0.15117 0.694887
1 2 3 4
Weights 0.128607 -0.114295 0.245675 0.676194
1 2 3 4
Weights -0.137674 -0.072332 0.140423 0.960372
1 2 3 4
Weights 0.024927 -0.328772 0.376807 0.868938
1 2 3 4
Weights -0.185035 -0.036549 0.217049 0.916408
1 2 3 4
Weights -0.243279 -0.045304 0.310472 0.889702
1 2 3 4
Weights -0.022376 -0.132982 0.280192 0.818175
1 2 3 4
Weights -0.182001 -0.180653 0.320769 0.925469
1 2 3 4
Weights -0.069672 -0.105255 0.324256 0.781141
1 2 3 4
Weights -0.016895 -0.23254 0.153256 0.958345
1 2 3 4
Weights 0.022481 -0.131577 0.21244 0.760523
1 2 3 4
Weights -0.248839 -0.142512 0.462355 0.841014
1 2 3 4
Weights 0.004716 -0.273489 0.594216 0.588842
1 2 3 4
Weights -0.121799 0.023751 0.167091 0.856263
1 2 3 4
Weights -0.095559 -0.27724 0.229249 1.051423
1 2 3 4
Weights -0.124513 -0.065585 0.129546 0.959842
1 2 3 4
Weights -0.039269 -0.211149 0.208379 0.955195
1 2 3 4
Weights -0.15437 -0.128932 0.363735 0.810107
1 2 3 4
Weights -0.176825 0.003974 0.257393 0.82991
1 2 3 4
Weights 0.245072 -0.111408 0.129108 0.683725
1 2 3 4
Weights -0.053653 -0.156746 0.180478 0.906685
1 2 3 4
Weights -0.063273 -0.211343 0.440294 0.752009
1 2 3 4
Weights -0.040329 -0.014287 0.176309 0.811266
1 2 3 4
Weights -0.049439 -0.244092 0.255596 0.968896
1 2 3 4
Weights -0.032799 -0.357149 0.425715 0.911441
1 2 3 4
Weights -0.195576 -0.099886 0.398822 0.794825
1 2 3 4
Weights -0.037229 -0.371153 0.460452 0.836582
1 2 3 4
Weights -0.187042 -0.108681 0.179253 1.012636
1 2 3 4
Weights -0.068056 -0.07185 0.216421 0.842565
1 2 3 4
Weights 0.04445 0.063898 0.055683 0.763813
1 2 3 4
Weights -0.180262 -0.021293 0.266451 0.829585
1 2 3 4
Weights -0.224734 -0.079961 0.329663 0.877935
1 2 3 4
Weights -0.02929 -0.015031 0.279392 0.715913
1 2 3 4
Weights -0.026334 -0.363024 0.227977 1.061936
1 2 3 4
Weights 0.002243 -0.027064 0.152631 0.79091
1 2 3 4
Weights 0.057708 -0.333153 0.392261 0.839584
1 2 3 4
Weights -0.049498 -0.313568 0.177703 1.071589
1 2 3 4
Weights -0.06031 -0.040706 0.315503 0.662942
1 2 3 4
Weights -0.07099 -0.313046 0.515653 0.799314
1 2 3 4
Weights -0.006761 0.031234 0.114174 0.710793
1 2 3 4
Weights -0.00267 -0.395135 0.335507 0.973531
1 2 3 4
Weights -0.145198 -0.114883 0.197498 0.956512
1 2 3 4
Weights -0.029364 -0.206871 0.378952 0.747648
1 2 3 4
Weights -0.030138 -0.321158 0.504516 0.778814
1 2 3 4
Weights -0.131325 -0.153309 0.135789 1.04814
1 2 3 4
Weights -0.17053 -0.024801 0.203526 0.921763
1 2 3 4
Weights -0.078537 -0.147354 0.07966 1.073456
1 2 3 4
Weights -0.035369 -0.413796 0.355409 0.991736
1 2 3 4
Weights -0.072281 -0.208971 0.047599 1.136149
1 2 3 4
Weights -0.144512 0.028178 0.14538 0.871143
1 2 3 4
Weights -0.142803 -0.1101 0.340716 0.799687
1 2 3 4
Weights -0.124621 -0.258389 0.456455 0.807841
1 2 3 4
Weights -0.124354 -0.224497 0.26651 0.990473
1 2 3 4
Weights -0.190709 -0.005547 0.131807 0.982056
1 2 3 4
Weights -0.077748 0.023033 0.323605 0.705532
1 2 3 4
Weights -0.035876 -0.18452 0.314834 0.865413
1 2 3 4
Weights -0.047262 -0.002229 0.226202 0.70838
1 2 3 4
Weights 0.06832 -0.209011 0.25416 0.776763
1 2 3 4
Weights -0.113635 -0.131823 0.146742 0.998626
1 2 3 4
Weights -0.228127 -0.106717 0.231812 1.012897
1 2 3 4
Weights -0.035312 -0.445716 0.497934 0.925608
1 2 3 4
Weights -0.142696 0.205387 0.273841 0.485771
1 2 3 4
Weights -0.05705 -0.229491 0.407891 0.805639
1 2 3 4
Weights -0.19431 -0.187309 0.349044 0.965446
1 2 3 4
Weights -0.118332 -0.061904 0.073043 1.051207
1 2 3 4
Weights 0.02993 -0.395992 0.469445 0.844986
1 2 3 4
Weights -0.106968 -0.078205 0.389156 0.667093
1 2 3 4
Weights -0.078667 0.038441 0.139088 0.796303
1 2 3 4
Weights 0.056119 0.025977 0.18918 0.671473
1 2 3 4
Weights -0.046847 -0.174123 0.269078 0.860805
1 2 3 4
Weights 0.052456 -0.173319 0.358314 0.687485
1 2 3 4
Weights -0.157725 0.068374 0.119818 0.880075
1 2 3 4
Weights -0.015844 0.003139 0.041231 0.858716
1 2 3 4
Weights -0.011016 -0.191651 0.379528 0.746708
1 2 3 4
Weights -0.078963 -0.334799 0.335658 0.972491
1 2 3 4
Weights -0.134622 -0.24039 0.262206 1.015514
1 2 3 4
Weights -0.001953 -0.096623 0.319345 0.715625
1 2 3 4
Weights -0.157859 -0.320383 0.430045 0.96115
Predicting t+1...
Fitting t+2...
1 2 3 4
Weights -0.331067 -0.154482 0.2708 0.955107
1 2 3 4
Weights -0.254019 0.14007 0.189794 0.678325
1 2 3 4
Weights -0.17922 -0.303096 -0.060886 1.314277
1 2 3 4
Weights -0.255413 -0.099201 0.215437 0.92038
1 2 3 4
Weights -0.268158 0.262266 0.124765 0.784745
1 2 3 4
Weights -0.228673 -0.121036 -0.061688 1.153221
1 2 3 4
Weights -0.3667 0.039933 0.279853 0.890132
1 2 3 4
Weights -0.338527 -0.09735 0.051473 1.163239
1 2 3 4
Weights -0.311895 -0.039289 -0.005475 1.149716
1 2 3 4
Weights -0.085514 -0.255333 -0.353015 1.476139
1 2 3 4
Weights -0.246193 -0.191619 0.243531 0.943513
1 2 3 4
Weights -0.204824 0.019447 0.136848 0.856322
1 2 3 4
Weights -0.013035 0.250112 0.027771 0.607615
1 2 3 4
Weights -0.184526 -0.087053 0.188645 0.945767
1 2 3 4
Weights -0.294677 0.138695 0.113182 0.911077
1 2 3 4
Weights -0.321483 0.004927 0.01029 1.086011
1 2 3 4
Weights -0.259458 -0.047325 0.102268 1.014737
1 2 3 4
Weights -0.267182 0.148855 0.29433 0.621062
1 2 3 4
Weights -0.408287 0.105251 0.177416 0.961171
1 2 3 4
Weights -0.119572 -0.21879 -0.112758 1.228704
1 2 3 4
Weights -0.262221 -0.142751 0.094488 1.089788
1 2 3 4
Weights -0.291814 -0.087419 0.099651 1.040384
1 2 3 4
Weights -0.156502 -0.256111 -0.10575 1.261067
1 2 3 4
Weights -0.240798 -0.181255 -0.107336 1.338378
1 2 3 4
Weights -0.33744 0.04405 0.213191 0.93429
1 2 3 4
Weights -0.411768 0.189392 0.221991 0.854256
1 2 3 4
Weights -0.384094 -0.044472 0.359453 0.932754
1 2 3 4
Weights -0.20479 0.058871 0.149629 0.854587
1 2 3 4
Weights -0.331383 0.117173 0.213072 0.757784
1 2 3 4
Weights -0.159521 -0.003243 -0.003989 0.982149
1 2 3 4
Weights -0.260359 -0.263798 0.135613 1.250736
1 2 3 4
Weights -0.139922 -0.280204 -0.084726 1.27588
1 2 3 4
Weights -0.416392 0.107393 0.11942 0.974742
1 2 3 4
Weights -0.287093 -0.025874 0.153343 0.991595
1 2 3 4
Weights -0.287731 0.512291 0.085703 0.468131
1 2 3 4
Weights -0.390294 0.171408 0.232774 0.861946
1 2 3 4
Weights -0.294205 0.161186 0.157482 0.767724
1 2 3 4
Weights -0.248882 -0.043957 0.123041 0.906595
1 2 3 4
Weights -0.26393 -0.037846 0.026832 1.136081
1 2 3 4
Weights -0.334648 -0.102129 0.071195 1.201922
1 2 3 4
Weights -0.253279 0.160585 0.275119 0.69429
1 2 3 4
Weights 0.092522 0.179067 0.115379 0.419974
1 2 3 4
Weights -0.412953 0.162722 0.316728 0.763292
1 2 3 4
Weights -0.33121 0.01628 0.195358 0.879493
1 2 3 4
Weights -0.282504 -0.075758 0.184284 0.999993
1 2 3 4
Weights -0.004046 -0.476512 -0.169901 1.44701
1 2 3 4
Weights -0.199627 0.028154 0.13252 0.83856
1 2 3 4
Weights -0.242257 -0.084245 -0.014182 1.193288
1 2 3 4
Weights -0.303612 -0.21618 0.186644 1.134469
1 2 3 4
Weights -0.197034 0.514301 0.150741 0.40979
1 2 3 4
Weights -0.345906 0.086952 -0.05109 1.12423
1 2 3 4
Weights -0.260789 0.018903 0.232366 0.805507
1 2 3 4
Weights -0.218838 -0.033192 0.170895 0.935726
1 2 3 4
Weights -0.325069 0.112376 0.040428 1.024544
1 2 3 4
Weights -0.250053 -0.078593 0.377116 0.750417
1 2 3 4
Weights -0.182926 0.312009 0.247032 0.371221
1 2 3 4
Weights -0.351116 -0.08377 0.046141 1.22149
1 2 3 4
Weights -0.111509 0.167865 0.061307 0.684198
1 2 3 4
Weights 0.016795 -0.350513 -0.232156 1.328607
1 2 3 4
Weights -0.236112 0.022235 0.190702 0.849941
1 2 3 4
Weights -0.453276 0.179729 0.019925 1.05793
1 2 3 4
Weights -0.14729 0.327833 0.166637 0.471378
1 2 3 4
Weights -0.20005 0.323672 0.147623 0.609577
1 2 3 4
Weights -0.373352 -0.016947 0.348356 0.845936
1 2 3 4
Weights -0.081617 0.291453 -0.07254 0.670897
1 2 3 4
Weights -0.219965 -0.150282 -0.153104 1.289273
1 2 3 4
Weights -0.292772 -0.160896 0.249521 0.94014
1 2 3 4
Weights -0.343851 -0.012307 -0.240037 1.401006
1 2 3 4
Weights -0.339238 0.375879 0.064185 0.723361
1 2 3 4
Weights -0.062705 -0.279323 -0.259634 1.338274
1 2 3 4
Weights -0.236615 -0.019746 0.327299 0.730581
1 2 3 4
Weights -0.375791 -0.145916 0.304031 1.025222
1 2 3 4
Weights -0.327306 0.12161 -0.158503 1.156633
1 2 3 4
Weights -0.252244 -0.087935 0.213648 0.840136
1 2 3 4
Weights -0.153671 0.096972 0.244636 0.682056
1 2 3 4
Weights -0.024045 0.391891 0.061855 0.213598
1 2 3 4
Weights -0.417858 0.011934 0.31433 0.955345
1 2 3 4
Weights -0.400839 -0.124436 0.29434 1.082673
1 2 3 4
Weights -0.282367 -0.192453 0.105115 1.148242
1 2 3 4
Weights -0.182744 0.090742 0.203543 0.644165
1 2 3 4
Weights -0.257124 -0.121117 0.212888 0.970855
1 2 3 4
Weights -0.119557 -0.309136 -0.025747 1.228489
1 2 3 4
Weights -0.328985 0.129902 -0.13987 1.10229
1 2 3 4
Weights 0.01035 0.077721 -0.086603 0.758299
1 2 3 4
Weights -0.36364 -0.106579 0.050093 1.195713
1 2 3 4
Weights -0.099445 -0.315515 -0.065557 1.221389
1 2 3 4
Weights -0.419232 -0.021813 0.228954 1.017596
1 2 3 4
Weights -0.292616 -0.006727 0.193188 0.890162
1 2 3 4
Weights -0.238652 -0.251912 0.087232 1.178134
1 2 3 4
Weights -0.181372 -0.029014 0.172249 0.838147
1 2 3 4
Weights -0.105765 -0.30148 -0.196953 1.395227
1 2 3 4
Weights -0.025216 -0.462728 -0.113462 1.381749
1 2 3 4
Weights -0.308304 -0.126723 0.190188 0.987608
1 2 3 4
Weights -0.344249 0.061455 0.182717 0.994531
1 2 3 4
Weights -0.199251 -0.185981 0.092839 1.048883
1 2 3 4
Weights -0.252051 -0.05011 0.101883 1.003803
1 2 3 4
Weights -0.16446 -0.284137 -0.068876 1.278295
1 2 3 4
Weights -0.204937 -0.380528 0.019463 1.345969
1 2 3 4
Weights -0.343095 -0.038554 0.217235 0.99125
1 2 3 4
Weights -0.241193 -0.245492 0.11337 1.17711
1 2 3 4
Weights -0.248867 -0.098991 0.163107 1.019126
1 2 3 4
Weights -0.252463 0.072253 0.236463 0.828416
1 2 3 4
Weights -0.341763 -0.046508 0.13377 1.025692
1 2 3 4
Weights -0.381109 0.091153 0.109703 1.079319
1 2 3 4
Weights -0.28651 0.130205 0.109519 0.796932
1 2 3 4
Weights -0.298127 0.043416 0.051988 1.002375
1 2 3 4
Weights -0.284568 0.019967 0.261233 0.836777
1 2 3 4
Weights -0.210313 -0.307867 0.028054 1.279065
1 2 3 4
Weights -0.300409 0.006374 0.2769 0.809055
1 2 3 4
Weights -0.384802 -0.088064 0.261394 0.946603
1 2 3 4
Weights -0.373491 0.162413 0.186103 0.791302
1 2 3 4
Weights -0.366638 -0.039294 0.235273 1.052565
1 2 3 4
Weights -0.201502 0.023183 0.230263 0.780409
1 2 3 4
Weights -0.22559 -0.016295 0.055042 0.911676
1 2 3 4
Weights -0.456498 0.057817 0.224015 1.000586
1 2 3 4
Weights -0.238037 -0.033202 0.208707 0.929283
1 2 3 4
Weights -0.271813 -0.005462 0.106052 0.971079
1 2 3 4
Weights -0.178554 -0.001847 0.213055 0.702321
1 2 3 4
Weights -0.212997 -0.112589 0.023436 1.020662
1 2 3 4
Weights -0.387925 0.21667 0.059598 0.887138
1 2 3 4
Weights -0.317631 -0.084351 0.218511 0.937152
1 2 3 4
Weights -0.28371 0.045884 0.090438 0.910256
1 2 3 4
Weights -0.280745 -0.1665 0.121293 1.142939
1 2 3 4
Weights -0.414117 0.521253 0.189508 0.60514
1 2 3 4
Weights -0.268276 -0.076399 0.158071 1.010248
1 2 3 4
Weights -0.342597 0.154382 0.186583 0.904339
1 2 3 4
Weights -0.303913 -0.058476 0.255313 0.969727
1 2 3 4
Weights -0.356284 -0.081971 0.275741 1.032792
1 2 3 4
Weights -0.287511 -0.066442 0.068298 1.06805
1 2 3 4
Weights -0.208627 0.451558 0.225106 0.379824
1 2 3 4
Weights -0.303191 -0.121095 0.108927 1.09646
1 2 3 4
Weights -0.492567 0.106338 0.112901 1.095199
1 2 3 4
Weights -0.362782 0.062335 0.134299 0.97385
1 2 3 4
Weights -0.186136 -0.020642 0.059384 0.997074
1 2 3 4
Weights -0.26009 -0.038943 0.172573 1.047511
1 2 3 4
Weights -0.1355 -0.41241 0.008658 1.301278
1 2 3 4
Weights -0.379387 0.151673 0.1215 1.005331
1 2 3 4
Weights -0.135688 0.086836 0.165641 0.717234
1 2 3 4
Weights -0.191059 0.045641 0.064348 0.934905
1 2 3 4
Weights -0.196813 -0.299413 -0.048271 1.313614
1 2 3 4
Weights -0.450941 0.124163 0.232707 0.934384
1 2 3 4
Weights -0.329549 -0.073481 0.149237 1.00847
1 2 3 4
Weights -0.215299 -0.205845 0.082116 1.122517
1 2 3 4
Weights -0.26365 0.077822 0.18111 0.787504
1 2 3 4
Weights -0.20395 -0.039176 0.14785 0.907551
1 2 3 4
Weights -0.234365 -0.104141 0.106927 1.097022
1 2 3 4
Weights -0.430129 0.120026 -0.024952 1.12849
1 2 3 4
Weights -0.316124 -0.062938 0.175811 1.028272
1 2 3 4
Weights -0.347562 -0.127285 0.201841 1.086892
1 2 3 4
Weights -0.231602 -0.079656 -0.015187 1.130732
1 2 3 4
Weights -0.30971 0.179003 0.147073 0.756195
1 2 3 4
Weights -0.402399 0.017719 0.247659 0.97279
1 2 3 4
Weights -0.359389 -0.000214 0.281843 0.885515
1 2 3 4
Weights -0.159546 -0.001873 0.179158 0.781684
1 2 3 4
Weights -0.253665 0.127709 0.219196 0.781965
1 2 3 4
Weights -0.13537 -0.37983 -0.082793 1.359545
1 2 3 4
Weights -0.265245 0.00701 0.069037 0.939794
1 2 3 4
Weights -0.369588 0.17295 0.174975 0.889021
1 2 3 4
Weights -0.39909 0.211431 0.12717 0.947785
1 2 3 4
Weights -0.2465 -0.164142 0.081251 1.143073
1 2 3 4
Weights -0.363896 0.116032 0.137039 0.947905
1 2 3 4
Weights -0.319439 -0.109959 0.272693 0.962096
1 2 3 4
Weights -0.266881 -0.011947 0.233838 0.910407
1 2 3 4
Weights -0.230279 -0.082172 0.064854 1.083158
1 2 3 4
Weights -0.306042 0.1194 0.213505 0.868854
1 2 3 4
Weights -0.309543 0.028361 0.326484 0.767811
1 2 3 4
Weights -0.307014 -0.097918 0.150713 1.118533
1 2 3 4
Weights -0.253802 -0.047416 0.128368 0.966482
1 2 3 4
Weights -0.386901 -0.057926 0.171615 1.06074
1 2 3 4
Weights -0.324689 -0.225772 0.152584 1.208188
1 2 3 4
Weights -0.314746 -0.115671 0.06187 1.154823
1 2 3 4
Weights -0.236302 -0.355559 0.156338 1.237758
1 2 3 4
Weights -0.317411 0.006759 0.088956 0.966457
1 2 3 4
Weights -0.137464 -0.365826 -0.163427 1.489285
1 2 3 4
Weights -0.329508 0.130128 0.171331 0.840711
1 2 3 4
Weights -0.290596 -0.16223 0.154974 1.115829
1 2 3 4
Weights -0.296153 -0.019223 0.136397 1.063228
1 2 3 4
Weights -0.277055 -0.28531 0.180199 1.154819
1 2 3 4
Weights -0.146468 0.39319 -0.412974 0.816113
1 2 3 4
Weights -0.222216 -0.26162 0.239588 0.986171
1 2 3 4
Weights -0.310371 0.084653 0.245626 0.836142
1 2 3 4
Weights -0.210723 -0.282968 0.102175 1.153403
1 2 3 4
Weights -0.335535 0.111956 0.265948 0.832192
1 2 3 4
Weights 0.186684 0.145575 0.018582 0.264086
1 2 3 4
Weights -0.277929 -0.014931 0.126159 0.936346
1 2 3 4
Weights -0.290397 -0.154349 -0.172089 1.485192
1 2 3 4
Weights -0.299202 -0.024586 0.152156 0.984715
1 2 3 4
Weights -0.203474 -0.183542 0.109056 1.077767
1 2 3 4
Weights -0.342989 0.246163 0.063196 0.812742
1 2 3 4
Weights -0.366963 -0.137305 0.189983 1.123924
1 2 3 4
Weights -0.310779 -0.040332 0.153438 1.100222
1 2 3 4
Weights -0.297998 0.097737 0.104751 0.845789
1 2 3 4
Weights -0.244122 -0.038716 0.161368 0.872526
1 2 3 4
Weights -0.34918 0.035708 0.191281 1.022472
1 2 3 4
Weights -0.260017 -0.05335 0.132456 1.008232
1 2 3 4
Weights -0.375233 0.006347 0.225147 0.939543
1 2 3 4
Weights -0.263536 -0.191524 0.158857 1.047737
1 2 3 4
Weights -0.081441 0.126681 0.104087 0.530887
1 2 3 4
Weights -0.252708 -0.195151 0.053392 1.213806
1 2 3 4
Weights -0.247122 -0.150414 0.202091 1.043282
1 2 3 4
Weights -0.534514 0.181218 0.18566 1.052376
1 2 3 4
Weights -0.280994 -0.069468 -0.089037 1.213497
1 2 3 4
Weights -0.322908 -0.021153 0.229703 0.964744
1 2 3 4
Weights -0.455721 0.09727 0.228626 0.989484
1 2 3 4
Weights -0.238192 -0.222109 0.150411 1.076698
1 2 3 4
Weights -0.100029 0.021715 0.17212 0.626866
1 2 3 4
Weights -0.378242 0.27751 0.202776 0.752446
1 2 3 4
Weights -0.324895 -0.171022 0.308202 0.943324
1 2 3 4
Weights -0.337035 -0.046186 0.1464 1.03989
1 2 3 4
Weights -0.154853 0.579374 0.124087 0.326937
1 2 3 4
Weights -0.405757 0.008065 0.213939 1.055376
1 2 3 4
Weights -0.39511 0.058101 0.209099 0.945818
1 2 3 4
Weights -0.336972 -0.052093 0.025365 1.194343
1 2 3 4
Weights -0.18724 0.080837 0.041406 0.878426
1 2 3 4
Weights -0.268572 -0.004812 0.124959 0.970731
1 2 3 4
Weights -0.129552 0.231298 0.180682 0.460207
1 2 3 4
Weights -0.300784 0.099586 0.198726 0.891653
1 2 3 4
Weights -0.433067 -0.012457 0.24176 1.047034
1 2 3 4
Weights -0.281974 -0.242692 0.035951 1.290252
1 2 3 4
Weights -0.318914 -0.132367 0.14223 1.097917
1 2 3 4
Weights -0.367516 0.042317 0.158405 1.02584
1 2 3 4
Weights -0.364672 -0.027705 0.137197 1.049682
1 2 3 4
Weights -0.35813 -0.038074 0.201228 1.05372
1 2 3 4
Weights -0.047621 0.043496 0.127079 0.632568
1 2 3 4
Weights -0.205677 0.250137 0.13505 0.693797
1 2 3 4
Weights -0.338151 -0.002736 0.087109 1.017536
1 2 3 4
Weights -0.462399 0.159029 0.185509 0.981989
1 2 3 4
Weights -0.436582 0.037191 0.211263 0.996964
1 2 3 4
Weights -0.353886 -0.152147 0.264826 1.062281
1 2 3 4
Weights -0.269936 0.076678 0.164819 0.90965
1 2 3 4
Weights -0.284796 -0.23479 0.148016 1.133402
1 2 3 4
Weights -0.319606 0.059891 0.228152 0.891318
1 2 3 4
Weights -0.09374 -0.165938 -0.074399 1.050834
1 2 3 4
Weights -0.249015 0.128371 0.087266 0.758316
1 2 3 4
Weights -0.381912 -0.207286 0.311356 1.101206
1 2 3 4
Weights -0.255416 -0.004084 0.230416 0.870069
1 2 3 4
Weights -0.287993 0.058689 0.193726 0.882597
1 2 3 4
Weights -0.175285 -0.306502 -0.020817 1.304269
1 2 3 4
Weights -0.283511 -0.025843 0.077245 1.017904
1 2 3 4
Weights -0.397816 0.107737 0.05948 1.029737
1 2 3 4
Weights -0.293519 -0.116291 0.21836 0.974899
1 2 3 4
Weights -0.380875 0.030788 0.270397 0.903371
1 2 3 4
Weights -0.220088 0.442213 0.043927 0.619547
1 2 3 4
Weights -0.36651 0.097114 0.060147 0.934981
1 2 3 4
Weights -0.201444 -0.104437 0.183012 0.964082
1 2 3 4
Weights -0.355272 0.218298 0.182518 0.816182
1 2 3 4
Weights -0.246719 -0.08474 0.050956 1.130517
1 2 3 4
Weights -0.373808 -0.038365 0.172382 1.122118
1 2 3 4
Weights -0.293248 -0.156512 0.262701 0.990252
1 2 3 4
Weights -0.208821 -0.194256 0.091905 1.086499
1 2 3 4
Weights -0.271489 -0.20884 0.083422 1.178354
1 2 3 4
Weights -0.30349 0.079314 0.164064 0.894879
1 2 3 4
Weights -0.016953 0.133653 0.108037 0.642141
1 2 3 4
Weights -0.30832 -0.056582 0.234135 0.922822
1 2 3 4
Weights -0.346704 -0.153755 0.249373 1.053083
1 2 3 4
Weights -0.188076 0.084025 0.23002 0.784064
1 2 3 4
Weights -0.147449 -0.28304 -0.095158 1.30629
1 2 3 4
Weights -0.450155 0.343279 0.16242 0.764846
1 2 3 4
Weights -0.49245 0.232468 0.217817 0.942462
1 2 3 4
Weights -0.077583 -0.360103 -0.118842 1.315185
1 2 3 4
Weights -0.179944 0.007394 0.209872 0.741692
1 2 3 4
Weights -0.39592 -0.042214 0.26519 1.027929
1 2 3 4
Weights -0.154464 0.133572 0.123205 0.617496
1 2 3 4
Weights -0.251434 -0.141746 0.013045 1.182186
1 2 3 4
Weights -0.264715 -0.130126 0.09494 1.078049
1 2 3 4
Weights -0.30281 0.030583 0.179104 0.871374
1 2 3 4
Weights -0.394088 0.017106 0.253246 0.980574
1 2 3 4
Weights -0.290225 -0.126395 0.006181 1.188136
1 2 3 4
Weights -0.416621 0.056435 0.20693 1.000865
1 2 3 4
Weights -0.47355 0.188593 -0.033016 1.140189
1 2 3 4
Weights -0.147269 -0.332145 -0.023097 1.286133
1 2 3 4
Weights -0.233268 -0.134239 -0.148561 1.291489
1 2 3 4
Weights -0.302945 0.039206 0.175099 0.884161
1 2 3 4
Weights -0.334907 -0.044091 0.226899 0.926568
1 2 3 4
Weights -0.325877 -0.139363 0.201422 1.02392
1 2 3 4
Weights -0.261134 -0.203425 0.07083 1.195914
1 2 3 4
Weights -0.278412 -0.103352 0.137864 1.072458
1 2 3 4
Weights -0.208578 0.045944 0.296396 0.818581
1 2 3 4
Weights -0.34715 0.086529 0.185286 0.993633
1 2 3 4
Weights -0.170945 0.058477 0.185066 0.718634
1 2 3 4
Weights -0.202173 0.10377 0.057791 0.814034
1 2 3 4
Weights -0.263314 -0.092176 0.035175 1.103287
1 2 3 4
Weights -0.29963 -0.262432 0.143301 1.229728
1 2 3 4
Weights -0.342639 -0.15373 0.164161 1.204625
1 2 3 4
Weights -0.220767 0.02703 0.374112 0.529567
1 2 3 4
Weights -0.355192 0.014914 0.21786 0.968757
1 2 3 4
Weights -0.284013 -0.274209 0.177119 1.242664
1 2 3 4
Weights -0.317372 0.026868 0.025922 1.139261
1 2 3 4
Weights -0.42993 0.080353 0.219554 1.013386
1 2 3 4
Weights -0.3193 0.011472 0.267789 0.794801
1 2 3 4
Weights -0.248701 0.102836 0.170396 0.76822
1 2 3 4
Weights -0.093803 0.164186 0.178914 0.647558
1 2 3 4
Weights -0.268902 0.00778 0.113698 0.957264
1 2 3 4
Weights -0.291512 0.161753 0.196317 0.784716
1 2 3 4
Weights -0.483102 0.214133 0.207392 0.865402
1 2 3 4
Weights -0.244844 0.194679 0.045216 0.772461
1 2 3 4
Weights -0.267968 0.041282 0.19763 0.881273
1 2 3 4
Weights -0.273125 -0.205822 0.051115 1.198421
1 2 3 4
Weights -0.195777 -0.316761 0.040009 1.269182
1 2 3 4
Weights -0.152628 0.037618 0.17974 0.816226
1 2 3 4
Weights -0.25994 -0.353857 0.134628 1.298769
Predicting t+2...
Fitting t+3...
1 2 3 4
Weights -0.379355 -0.218883 0.172915 1.019266
1 2 3 4
Weights -0.309331 -0.019637 0.274401 0.685174
1 2 3 4
Weights -0.112013 -0.564886 -0.040795 1.346625
1 2 3 4
Weights -0.136973 -0.446906 0.450335 0.795137
1 2 3 4
Weights -0.284365 -0.037066 0.432129 0.744525
1 2 3 4
Weights -0.135418 -0.412924 0.026665 1.116215
1 2 3 4
Weights -0.212441 -0.393649 0.462632 0.902334
1 2 3 4
Weights -0.401291 -0.210915 0.034957 1.221672
1 2 3 4
Weights -0.223667 -0.345208 0.057761 1.184361
1 2 3 4
Weights -0.048653 -0.429763 -0.375076 1.491385
1 2 3 4
Weights -0.182733 -0.436785 0.435821 0.784886
1 2 3 4
Weights -0.20666 -0.151574 0.269466 0.791399
1 2 3 4
Weights 0.238453 -0.191824 0.47158 0.308876
1 2 3 4
Weights -0.105226 -0.361327 0.423297 0.828198
1 2 3 4
Weights -0.265602 -0.208612 0.429801 0.83796
1 2 3 4
Weights -0.270306 -0.331661 0.21867 1.034463
1 2 3 4
Weights -0.254002 -0.258114 0.232675 0.977915
1 2 3 4
Weights -0.277766 -0.059689 0.353536 0.684367
1 2 3 4
Weights -0.467659 -0.116672 0.329319 0.99664
1 2 3 4
Weights 0.002999 -0.585732 0.109958 1.115841
1 2 3 4
Weights -0.213072 -0.356916 0.114401 1.10827
1 2 3 4
Weights -0.287324 -0.319525 0.231061 0.994411
1 2 3 4
Weights -0.154573 -0.410896 -0.079425 1.225676
1 2 3 4
Weights -0.250832 -0.261588 -0.281577 1.484513
1 2 3 4
Weights -0.295257 -0.38832 0.623215 0.826126
1 2 3 4
Weights -0.379199 -0.255199 0.589926 0.820402
1 2 3 4
Weights -0.459777 -0.126501 0.29602 1.083051
1 2 3 4
Weights -0.192267 -0.234435 0.498029 0.705785
1 2 3 4
Weights -0.294337 -0.186578 0.378377 0.732568
1 2 3 4
Weights -0.009881 -0.446113 0.385772 0.787508
1 2 3 4
Weights -0.199176 -0.648254 0.3671 1.253206
1 2 3 4
Weights 0.009321 -0.648682 0.021066 1.251906
1 2 3 4
Weights -0.362568 -0.271195 0.334417 0.9634
1 2 3 4
Weights -0.226872 -0.35228 0.372293 0.943607
1 2 3 4
Weights -0.307217 0.029351 0.489212 0.470013
1 2 3 4
Weights -0.33482 -0.24333 0.521773 0.865389
1 2 3 4
Weights -0.297499 -0.079227 0.30549 0.755478
1 2 3 4
Weights -0.224078 -0.237844 0.208089 0.855365
1 2 3 4
Weights -0.146253 -0.516755 0.395467 1.045692
1 2 3 4
Weights -0.278345 -0.54565 0.393498 1.16129
1 2 3 4
Weights -0.288745 -0.069771 0.470724 0.702095
1 2 3 4
Weights 0.255473 -0.000408 0.179127 0.345851
1 2 3 4
Weights -0.41016 -0.112549 0.388726 0.876644
1 2 3 4
Weights -0.335795 -0.165491 0.232106 0.904538
1 2 3 4
Weights -0.321665 -0.222392 0.262663 1.003742
1 2 3 4
Weights 0.184253 -0.930151 -0.011889 1.419824
1 2 3 4
Weights -0.202172 -0.125167 0.234621 0.786443
1 2 3 4
Weights -0.142316 -0.412672 0.113582 1.206302
1 2 3 4
Weights -0.310535 -0.430917 0.259124 1.158027
1 2 3 4
Weights -0.198384 0.014462 0.5781 0.431221
1 2 3 4
Weights -0.325585 -0.27798 0.261133 1.042696
1 2 3 4
Weights -0.200163 -0.247015 0.388025 0.748434
1 2 3 4
Weights -0.281837 -0.09755 0.21836 0.950097
1 2 3 4
Weights -0.301694 -0.330182 0.513909 0.881833
1 2 3 4
Weights -0.338751 -0.059502 0.262266 0.828337
1 2 3 4
Weights -0.241918 0.063272 0.306753 0.531337
1 2 3 4
Weights -0.387473 -0.299008 0.13759 1.27339
1 2 3 4
Weights -0.128255 -0.034384 0.320054 0.548278
1 2 3 4
Weights -0.0021 -0.383012 -0.257648 1.252369
1 2 3 4
Weights -0.252706 -0.158346 0.338112 0.805638
1 2 3 4
Weights -0.401402 -0.246188 0.306709 1.034492
1 2 3 4
Weights -0.264235 0.110017 0.377544 0.513457
1 2 3 4
Weights -0.134707 -0.118544 0.574538 0.503493
1 2 3 4
Weights -0.442585 -0.145515 0.356509 0.925258
1 2 3 4
Weights -0.027558 0.028845 0.212368 0.519962
1 2 3 4
Weights -0.1488 -0.44385 -0.059925 1.274627
1 2 3 4
Weights -0.288733 -0.282005 0.1956 0.965733
1 2 3 4
Weights -0.270629 -0.380719 -0.131314 1.463564
1 2 3 4
Weights -0.335149 -0.023864 0.421246 0.671184
1 2 3 4
Weights 0.045425 -0.507766 -0.263808 1.306446
1 2 3 4
Weights -0.323131 -0.057991 0.308032 0.766509
1 2 3 4
Weights -0.35274 -0.434928 0.437589 1.040671
1 2 3 4
Weights -0.266444 -0.182446 -0.006163 1.128975
1 2 3 4
Weights -0.286268 -0.197078 0.255567 0.787334
1 2 3 4
Weights -0.148924 -0.062195 0.369682 0.650088
1 2 3 4
Weights -0.093882 0.05446 0.417393 0.169261
1 2 3 4
Weights -0.287813 -0.558613 0.735586 0.893861
1 2 3 4
Weights -0.483969 -0.274966 0.349436 1.174413
1 2 3 4
Weights -0.247477 -0.39713 0.085933 1.206136
1 2 3 4
Weights -0.116801 -0.124561 0.295179 0.593573
1 2 3 4
Weights -0.275694 -0.241536 0.233075 0.97763
1 2 3 4
Weights 0.022154 -0.700763 0.184392 1.131441
1 2 3 4
Weights -0.229062 -0.226789 0.040409 1.049592
1 2 3 4
Weights 0.045978 -0.021119 0.062639 0.587723
1 2 3 4
Weights -0.350011 -0.358051 0.086931 1.261527
1 2 3 4
Weights -0.067384 -0.503188 0.019911 1.130909
1 2 3 4
Weights -0.438095 -0.244262 0.287996 1.096144
1 2 3 4
Weights -0.233048 -0.253426 0.271788 0.883895
1 2 3 4
Weights -0.186869 -0.504643 0.14315 1.186286
1 2 3 4
Weights -0.151632 -0.219963 0.329635 0.734695
1 2 3 4
Weights -0.021602 -0.606244 -0.101727 1.385869
1 2 3 4
Weights 0.164663 -0.872999 -0.01058 1.362165
1 2 3 4
Weights -0.317806 -0.287287 0.204132 0.997028
1 2 3 4
Weights -0.348614 -0.260575 0.469192 0.97033
1 2 3 4
Weights -0.127979 -0.452669 0.241868 0.956476
1 2 3 4
Weights -0.209165 -0.330885 0.301606 0.931202
1 2 3 4
Weights -0.054294 -0.562205 -0.076099 1.309541
1 2 3 4
Weights -0.11735 -0.666485 -0.0364 1.465391
1 2 3 4
Weights -0.287833 -0.43671 0.531337 0.915628
1 2 3 4
Weights -0.152251 -0.697623 0.469359 1.059211
1 2 3 4
Weights -0.193076 -0.449089 0.472782 0.902657
1 2 3 4
Weights -0.216212 -0.259076 0.577782 0.719729
1 2 3 4
Weights -0.356814 -0.216925 0.147676 1.064469
1 2 3 4
Weights -0.38579 -0.317918 0.523964 1.016805
1 2 3 4
Weights -0.263877 -0.116588 0.253079 0.752863
1 2 3 4
Weights -0.227804 -0.215544 0.121502 1.013089
1 2 3 4
Weights -0.360739 -0.071603 0.302963 0.889368
1 2 3 4
Weights -0.157791 -0.527409 -0.014813 1.35925
1 2 3 4
Weights -0.24624 -0.258019 0.409922 0.775801
1 2 3 4
Weights -0.389808 -0.285398 0.271827 0.987854
1 2 3 4
Weights -0.400607 -0.060893 0.270417 0.833926
1 2 3 4
Weights -0.351457 -0.506126 0.714542 0.948354
1 2 3 4
Weights -0.141505 -0.221319 0.424469 0.68205
1 2 3 4
Weights -0.098941 -0.311028 0.162804 0.838233
1 2 3 4
Weights -0.549896 -0.113467 0.298834 1.084864
1 2 3 4
Weights -0.251078 -0.206249 0.342076 0.905856
1 2 3 4
Weights -0.281888 -0.208303 0.251882 0.927124
1 2 3 4
Weights -0.156744 -0.131327 0.255021 0.64325
1 2 3 4
Weights -0.149715 -0.353383 0.135594 0.932657
1 2 3 4
Weights -0.305649 -0.34161 0.57329 0.720976
1 2 3 4
Weights -0.347879 -0.179539 0.147085 0.998085
1 2 3 4
Weights -0.194986 -0.311796 0.328425 0.815435
1 2 3 4
Weights -0.231604 -0.543735 0.404727 1.071711
1 2 3 4
Weights -0.405961 0.002437 0.554546 0.701884
1 2 3 4
Weights -0.214773 -0.312888 0.234864 1.016297
1 2 3 4
Weights -0.392166 -0.096851 0.424645 0.912191
1 2 3 4
Weights -0.247293 -0.407824 0.532859 0.903379
1 2 3 4
Weights -0.373295 -0.494832 0.75172 0.900641
1 2 3 4
Weights -0.163769 -0.505771 0.340142 0.985533
1 2 3 4
Weights -0.107812 -0.039679 0.480944 0.463945
1 2 3 4
Weights -0.330253 -0.294143 0.169753 1.104247
1 2 3 4
Weights -0.462905 -0.386272 0.503093 1.057055
1 2 3 4
Weights -0.442343 -0.064524 0.185377 1.016624
1 2 3 4
Weights -0.049221 -0.476206 0.467929 0.820444
1 2 3 4
Weights -0.324191 -0.128999 0.215914 1.111754
1 2 3 4
Weights -0.061439 -0.669397 0.025839 1.316056
1 2 3 4
Weights -0.417207 -0.18767 0.478032 0.964637
1 2 3 4
Weights -0.075154 -0.119104 0.316175 0.636057
1 2 3 4
Weights -0.174648 -0.187114 0.289099 0.84508
1 2 3 4
Weights -0.123552 -0.547432 -0.087647 1.384802
1 2 3 4
Weights -0.477944 -0.173998 0.420561 0.986386
1 2 3 4
Weights -0.24988 -0.392278 0.26101 0.99727
1 2 3 4
Weights -0.220774 -0.336304 0.084256 1.126207
1 2 3 4
Weights -0.20893 -0.250211 0.447924 0.680737
1 2 3 4
Weights -0.307486 -0.06494 0.205528 0.882588
1 2 3 4
Weights -0.183475 -0.396901 0.304654 1.059244
1 2 3 4
Weights -0.481649 -0.064502 0.038993 1.17836
1 2 3 4
Weights -0.262941 -0.496865 0.569098 0.906375
1 2 3 4
Weights -0.389524 -0.279133 0.224981 1.156324
1 2 3 4
Weights -0.173981 -0.371672 0.160634 1.071978
1 2 3 4
Weights -0.224187 -0.308707 0.585705 0.597302
1 2 3 4
Weights -0.470314 -0.18731 0.390203 1.01514
1 2 3 4
Weights -0.423362 -0.148962 0.34336 0.937368
1 2 3 4
Weights -0.128767 -0.16528 0.296813 0.695651
1 2 3 4
Weights -0.267184 -0.111213 0.425962 0.761968
1 2 3 4
Weights -0.060876 -0.564037 -0.228925 1.46982
1 2 3 4
Weights -0.159527 -0.264418 0.131337 0.918806
1 2 3 4
Weights -0.407952 -0.017173 0.248159 0.976508
1 2 3 4
Weights -0.404706 -0.269571 0.657336 0.83583
1 2 3 4
Weights -0.16237 -0.495143 0.237402 1.118951
1 2 3 4
Weights -0.35821 -0.11235 0.214534 1.005159
1 2 3 4
Weights -0.329706 -0.246039 0.248703 1.023898
1 2 3 4
Weights -0.292451 -0.287336 0.565972 0.797572
1 2 3 4
Weights -0.252685 -0.2633 0.202121 1.050374
1 2 3 4
Weights -0.344312 -0.154743 0.49936 0.834996
1 2 3 4
Weights -0.349795 -0.125874 0.385354 0.810479
1 2 3 4
Weights -0.32653 -0.293599 0.235734 1.165988
1 2 3 4
Weights -0.169166 -0.295351 0.191864 0.955434
1 2 3 4
Weights -0.402004 -0.265376 0.209544 1.118263
1 2 3 4
Weights -0.278935 -0.462395 0.090547 1.346575
1 2 3 4
Weights -0.287345 -0.341818 0.079654 1.206001
1 2 3 4
Weights -0.208184 -0.507713 0.030626 1.37211
1 2 3 4
Weights -0.308807 -0.243886 0.240856 0.910753
1 2 3 4
Weights -0.134498 -0.477486 -0.3421 1.657039
1 2 3 4
Weights -0.305066 -0.208985 0.408792 0.822918
1 2 3 4
Weights -0.23598 -0.400805 0.166691 1.178886
1 2 3 4
Weights -0.196034 -0.414835 0.380968 1.046619
1 2 3 4
Weights -0.272727 -0.463843 0.167036 1.203568
1 2 3 4
Weights -0.384922 0.53426 -0.194275 0.546636
1 2 3 4
Weights -0.246328 -0.335836 0.209682 0.963377
1 2 3 4
Weights -0.233771 -0.455357 0.844881 0.611192
1 2 3 4
Weights -0.138802 -0.502138 0.087775 1.178103
1 2 3 4
Weights -0.331226 -0.176285 0.468107 0.844997
1 2 3 4
Weights 0.05658 0.196579 0.150372 0.149695
1 2 3 4
Weights -0.209242 -0.383557 0.422446 0.80736
1 2 3 4
Weights -0.305417 -0.482934 0.025658 1.537494
1 2 3 4
Weights -0.285847 -0.252535 0.269132 0.974793
1 2 3 4
Weights -0.140517 -0.500839 0.36321 0.956759
1 2 3 4
Weights -0.386362 -0.027683 0.313217 0.759026
1 2 3 4
Weights -0.399091 -0.289851 0.152454 1.230972
1 2 3 4
Weights -0.364268 -0.21565 0.281802 1.139953
1 2 3 4
Weights -0.283932 -0.118124 0.205201 0.819525
1 2 3 4
Weights -0.301173 -0.111799 0.158968 0.86582
1 2 3 4
Weights -0.291354 -0.303092 0.376111 1.060985
1 2 3 4
Weights -0.209528 -0.411864 0.455952 0.889526
1 2 3 4
Weights -0.320497 -0.315252 0.369033 0.951225
1 2 3 4
Weights -0.192265 -0.41732 0.159841 1.060389
1 2 3 4
Weights -0.214307 0.119716 0.210401 0.455082
1 2 3 4
Weights -0.253083 -0.384596 0.091136 1.25412
1 2 3 4
Weights -0.168493 -0.423306 0.314034 1.037056
1 2 3 4
Weights -0.483773 -0.450625 0.751503 0.994993
1 2 3 4
Weights -0.24443 -0.313172 -0.025047 1.219647
1 2 3 4
Weights -0.369919 -0.325235 0.586627 0.862918
1 2 3 4
Weights -0.499658 -0.356779 0.723247 0.901431
1 2 3 4
Weights -0.201958 -0.399761 0.148139 1.084196
1 2 3 4
Weights -0.189983 0.017941 0.200089 0.569684
1 2 3 4
Weights -0.302428 -0.261698 0.666231 0.674165
1 2 3 4
Weights -0.268833 -0.414561 0.383689 0.917059
1 2 3 4
Weights -0.340076 -0.263549 0.235222 1.06374
1 2 3 4
Weights -0.142347 0.01027 0.629497 0.333562
1 2 3 4
Weights -0.425921 -0.4982 0.792534 0.916775
1 2 3 4
Weights -0.352318 -0.382786 0.569974 0.87309
1 2 3 4
Weights -0.297993 -0.396728 0.223348 1.19702
1 2 3 4
Weights -0.115604 -0.188932 0.243138 0.780428
1 2 3 4
Weights -0.220614 -0.391326 0.502039 0.826828
1 2 3 4
Weights -0.192755 0.062281 0.276211 0.492865
1 2 3 4
Weights -0.258467 -0.215957 0.435119 0.868616
1 2 3 4
Weights -0.415455 -0.437208 0.576848 1.025479
1 2 3 4
Weights -0.305563 -0.397887 -0.017008 1.393253
1 2 3 4
Weights -0.221244 -0.606035 0.480822 1.006048
1 2 3 4
Weights -0.406876 -0.167624 0.280095 1.07165
1 2 3 4
Weights -0.385085 -0.215229 0.179403 1.103543
1 2 3 4
Weights -0.258612 -0.563808 0.6226 0.972919
1 2 3 4
Weights -0.044896 -0.00755 0.144458 0.563405
1 2 3 4
Weights -0.17623 -0.105319 0.495816 0.594864
1 2 3 4
Weights -0.341526 -0.211113 0.163501 1.020821
1 2 3 4
Weights -0.396138 -0.422144 0.693231 0.907303
1 2 3 4
Weights -0.436209 -0.229906 0.29889 1.070654
1 2 3 4
Weights -0.425194 -0.242393 0.218279 1.161766
1 2 3 4
Weights -0.25042 -0.201744 0.3953 0.871436
1 2 3 4
Weights -0.313203 -0.38596 0.159189 1.155589
1 2 3 4
Weights -0.307198 -0.216824 0.42368 0.880332
1 2 3 4
Weights -0.128408 -0.233745 0.011907 0.901633
1 2 3 4
Weights -0.222449 -0.163507 0.340676 0.633698
1 2 3 4
Weights -0.397358 -0.424264 0.349527 1.1849
1 2 3 4
Weights -0.230079 -0.3565 0.652927 0.677403
1 2 3 4
Weights -0.333125 -0.07081 0.239234 0.926992
1 2 3 4
Weights -0.159105 -0.500936 0.001156 1.335697
1 2 3 4
Weights -0.326145 -0.151237 0.126676 1.015599
1 2 3 4
Weights -0.343017 -0.32261 0.382384 0.9664
1 2 3 4
Weights -0.267585 -0.320655 0.275284 0.976641
1 2 3 4
Weights -0.444507 -0.133706 0.340078 0.973145
1 2 3 4
Weights -0.218814 -0.028219 0.58596 0.488362
1 2 3 4
Weights -0.230046 -0.34862 0.295482 0.867767
1 2 3 4
Weights -0.140476 -0.345739 0.352178 0.884664
1 2 3 4
Weights -0.3604 -0.095626 0.421051 0.823387
1 2 3 4
Weights -0.226886 -0.349625 0.245901 1.091963
1 2 3 4
Weights -0.266267 -0.560865 0.533613 1.105662
1 2 3 4
Weights -0.272415 -0.327591 0.266846 1.023127
1 2 3 4
Weights -0.034392 -0.622597 0.306914 0.998252
1 2 3 4
Weights -0.255871 -0.360781 0.006489 1.265798
1 2 3 4
Weights -0.311355 -0.158613 0.328711 0.88631
1 2 3 4
Weights -0.063316 0.096428 0.173931 0.603593
1 2 3 4
Weights -0.362987 -0.155953 0.233643 0.959834
1 2 3 4
Weights -0.389928 -0.287074 0.233789 1.124915
1 2 3 4
Weights -0.218199 -0.056231 0.357046 0.781987
1 2 3 4
Weights 0.008666 -0.671704 0.007991 1.3049
1 2 3 4
Weights -0.408492 -0.142974 0.522547 0.753413
1 2 3 4
Weights -0.471443 -0.366503 0.82166 0.856299
1 2 3 4
Weights 0.001295 -0.56696 -0.135563 1.311509
1 2 3 4
Weights -0.153526 -0.137747 0.273885 0.687672
1 2 3 4
Weights -0.326591 -0.513145 0.622345 0.984187
1 2 3 4
Weights -0.235286 0.033129 0.252424 0.543736
1 2 3 4
Weights -0.113653 -0.611507 0.293029 1.11386
1 2 3 4
Weights -0.257053 -0.289395 0.098859 1.097715
1 2 3 4
Weights -0.233754 -0.324827 0.436375 0.778056
1 2 3 4
Weights -0.328248 -0.471494 0.660467 0.909678
1 2 3 4
Weights -0.267695 -0.352097 0.046883 1.216412
1 2 3 4
Weights -0.490195 -0.132453 0.316883 1.065899
1 2 3 4
Weights -0.412128 -0.289355 0.3029 1.113957
1 2 3 4
Weights -0.020226 -0.710989 0.123303 1.258105
1 2 3 4
Weights -0.121498 -0.474324 -0.066969 1.305298
1 2 3 4
Weights -0.213562 -0.202954 0.179175 0.931203
1 2 3 4
Weights -0.288584 -0.307067 0.317357 0.9268
1 2 3 4
Weights -0.188527 -0.541041 0.354246 1.000752
1 2 3 4
Weights -0.220544 -0.457773 0.132377 1.225364
1 2 3 4
Weights -0.326232 -0.172649 0.065289 1.161258
1 2 3 4
Weights -0.183727 -0.107535 0.346917 0.872906
1 2 3 4
Weights -0.337306 -0.255787 0.496169 0.969795
1 2 3 4
Weights -0.162521 -0.074163 0.255323 0.679253
1 2 3 4
Weights -0.182301 -0.187772 0.376003 0.649638
1 2 3 4
Weights -0.283249 -0.246963 0.091311 1.095065
1 2 3 4
Weights -0.326648 -0.383269 0.034025 1.36689
1 2 3 4
Weights -0.286881 -0.649273 0.565654 1.159305
1 2 3 4
Weights -0.151555 -0.077965 0.189109 0.640424
1 2 3 4
Weights -0.345198 -0.335342 0.522883 0.917308
1 2 3 4
Weights -0.276172 -0.481722 0.170831 1.361166
1 2 3 4
Weights -0.311774 -0.19915 0.127933 1.181758
1 2 3 4
Weights -0.351337 -0.513135 0.756235 0.919918
1 2 3 4
Weights -0.436051 -0.14029 0.44403 0.741199
1 2 3 4
Weights -0.273885 -0.046597 0.233304 0.776131
1 2 3 4
Weights -0.06122 -0.010209 0.306489 0.617824
1 2 3 4
Weights -0.219005 -0.282158 0.309851 0.893353
1 2 3 4
Weights -0.354909 -0.151588 0.592911 0.676864
1 2 3 4
Weights -0.558494 -0.043924 0.360772 0.930457
1 2 3 4
Weights -0.265647 -0.015885 0.233015 0.700081
1 2 3 4
Weights -0.13809 -0.348905 0.431867 0.832746
1 2 3 4
Weights -0.179012 -0.591771 0.224464 1.177661
1 2 3 4
Weights -0.176649 -0.495329 0.017732 1.326113
1 2 3 4
Weights -0.126417 -0.142127 0.337277 0.7505
1 2 3 4
Weights -0.223278 -0.660143 0.214303 1.369581
Predicting t+3...
Fitting t+4...
1 2 3 4
Weights -0.367258 -0.298068 0.118005 0.984536
1 2 3 4
Weights -0.238984 -0.124473 0.103913 0.772209
1 2 3 4
Weights -0.050958 -0.581434 -0.302964 1.423858
1 2 3 4
Weights -0.239739 -0.219281 0.090356 0.912073
1 2 3 4
Weights -0.241471 -0.09563 0.125355 1.02136
1 2 3 4
Weights -0.118661 -0.328303 -0.273013 1.169345
1 2 3 4
Weights -0.289067 -0.184103 0.063275 1.07698
1 2 3 4
Weights -0.312957 -0.390497 -0.079878 1.289882
1 2 3 4
Weights -0.210798 -0.280523 -0.249196 1.289689
1 2 3 4
Weights -0.001837 -0.458514 -0.546475 1.500699
1 2 3 4
Weights -0.277474 -0.26122 0.154145 0.842672
1 2 3 4
Weights -0.205498 -0.145018 0.082526 0.873256
1 2 3 4
Weights -0.168575 0.27 0.09691 0.57662
1 2 3 4
Weights -0.218199 -0.17308 0.150676 0.946774
1 2 3 4
Weights -0.249513 -0.183832 0.05917 1.095188
1 2 3 4
Weights -0.317928 -0.219435 -0.119648 1.176941
1 2 3 4
Weights -0.227635 -0.270311 0.007716 1.079133
1 2 3 4
Weights -0.193967 -0.119537 0.139204 0.775866
1 2 3 4
Weights -0.533801 -0.115415 0.123512 1.176167
1 2 3 4
Weights 0.002922 -0.427877 -0.286027 1.226743
1 2 3 4
Weights -0.218717 -0.311212 -0.096847 1.14503
1 2 3 4
Weights -0.248686 -0.341684 -0.020507 1.091647
1 2 3 4
Weights -0.071322 -0.493404 -0.247255 1.240647
1 2 3 4
Weights -0.222557 -0.343716 -0.368773 1.496836
1 2 3 4
Weights -0.336519 -0.284847 0.159543 1.141916
1 2 3 4
Weights -0.44665 -0.166231 0.155136 1.14768
1 2 3 4
Weights -0.51323 -0.18225 0.262544 1.133896
1 2 3 4
Weights -0.244216 -0.156736 0.164952 0.938188
1 2 3 4
Weights -0.277501 -0.162208 0.065246 0.879374
1 2 3 4
Weights -0.203716 -0.081183 -0.057891 0.958902
1 2 3 4
Weights -0.205303 -0.584253 -0.013328 1.483573
1 2 3 4
Weights -0.046402 -0.435542 -0.336289 1.307018
1 2 3 4
Weights -0.295955 -0.278681 -0.057966 1.18125
1 2 3 4
Weights -0.216291 -0.289112 0.026668 1.123731
1 2 3 4
Weights -0.190745 -0.071239 0.008257 0.846798
1 2 3 4
Weights -0.296171 -0.222022 0.09533 1.166393
1 2 3 4
Weights -0.256488 -0.11789 0.058641 0.89468
1 2 3 4
Weights -0.182625 -0.251562 -0.000311 0.908648
1 2 3 4
Weights -0.152413 -0.367124 -0.106852 1.324577
1 2 3 4
Weights -0.256683 -0.497052 -0.069193 1.450187
1 2 3 4
Weights -0.344961 -0.069485 0.258214 0.902508
1 2 3 4
Weights 0.365692 0.07345 -0.001717 0.338276
1 2 3 4
Weights -0.436978 -0.088667 0.140986 1.035019
1 2 3 4
Weights -0.356144 -0.157639 0.056935 0.957548
1 2 3 4
Weights -0.308762 -0.274414 0.112725 1.087314
1 2 3 4
Weights 0.173251 -0.719831 -0.470703 1.54238
1 2 3 4
Weights -0.187153 -0.135396 0.067711 0.848957
1 2 3 4
Weights -0.116908 -0.345306 -0.217825 1.353406
1 2 3 4
Weights -0.328305 -0.43101 0.057577 1.245167
1 2 3 4
Weights -0.133786 -0.019225 0.0729 0.854815
1 2 3 4
Weights -0.307107 -0.257857 -0.116383 1.273326
1 2 3 4
Weights -0.248777 -0.144713 0.11885 0.855606
1 2 3 4
Weights -0.395307 -0.067207 0.178125 0.986716
1 2 3 4
Weights -0.275151 -0.301655 0.031678 1.232375
1 2 3 4
Weights -0.377699 -0.125116 0.318704 0.760964
1 2 3 4
Weights -0.14581 -0.008581 0.056183 0.655007
1 2 3 4
Weights -0.390499 -0.347116 -0.072178 1.422297
1 2 3 4
Weights -0.166776 -0.02 0.097613 0.707867
1 2 3 4
Weights 0.010332 -0.394807 -0.295048 1.143359
1 2 3 4
Weights -0.292618 -0.13805 0.153662 0.914119
1 2 3 4
Weights -0.317649 -0.276775 -0.129626 1.309773
1 2 3 4
Weights -0.259085 -0.002626 0.161456 0.749201
1 2 3 4
Weights -0.177784 -0.025523 0.110402 0.866394
1 2 3 4
Weights -0.461522 -0.208886 0.257091 0.992864
1 2 3 4
Weights 0.076149 -0.028859 -0.021591 0.65812
1 2 3 4
Weights -0.091403 -0.437926 -0.355962 1.36678
1 2 3 4
Weights -0.314243 -0.263668 0.0889 0.926485
1 2 3 4
Weights -0.141633 -0.492276 -0.491995 1.683967
1 2 3 4
Weights -0.157996 -0.169162 0.011034 0.975049
1 2 3 4
Weights 0.024907 -0.369368 -0.490414 1.25699
1 2 3 4
Weights -0.371395 -0.115048 0.29531 0.766859
1 2 3 4
Weights -0.388727 -0.386935 0.170871 1.173837
1 2 3 4
Weights -0.198863 -0.197963 -0.301305 1.256784
1 2 3 4
Weights -0.260575 -0.240735 0.122372 0.794275
1 2 3 4
Weights -0.211473 -0.019873 0.221379 0.755896
1 2 3 4
Weights -0.136283 -0.003996 0.080773 0.513353
1 2 3 4
Weights -0.369426 -0.347915 0.176523 1.240407
1 2 3 4
Weights -0.550135 -0.325533 0.246614 1.293266
1 2 3 4
Weights -0.247063 -0.383436 -0.106159 1.244401
1 2 3 4
Weights -0.16094 -0.031922 0.077149 0.65192
1 2 3 4
Weights -0.25446 -0.281632 0.106728 1.007892
1 2 3 4
Weights -0.012888 -0.484558 -0.23054 1.23328
1 2 3 4
Weights -0.165174 -0.196305 -0.306099 1.17576
1 2 3 4
Weights -0.093822 0.134326 -0.035692 0.583633
1 2 3 4
Weights -0.309029 -0.413231 -0.161306 1.382776
1 2 3 4
Weights -0.089727 -0.416332 -0.184914 1.115264
1 2 3 4
Weights -0.400249 -0.324006 0.073665 1.236226
1 2 3 4
Weights -0.307484 -0.126904 0.03737 0.941975
1 2 3 4
Weights -0.194575 -0.450369 -0.105074 1.24471
1 2 3 4
Weights -0.205977 -0.13486 0.121196 0.808032
1 2 3 4
Weights 0.057222 -0.612919 -0.421976 1.50176
1 2 3 4
Weights 0.19892 -0.712686 -0.437362 1.458915
1 2 3 4
Weights -0.337783 -0.284686 0.053222 1.011916
1 2 3 4
Weights -0.44604 -0.17594 0.167656 1.216202
1 2 3 4
Weights -0.111719 -0.365824 -0.065123 1.030347
1 2 3 4
Weights -0.19156 -0.286157 -0.005516 1.071114
1 2 3 4
Weights -0.032976 -0.490062 -0.353238 1.342419
1 2 3 4
Weights -0.002029 -0.763471 -0.318736 1.589898
1 2 3 4
Weights -0.299278 -0.350703 0.102647 1.167524
1 2 3 4
Weights -0.159502 -0.545689 -0.040415 1.306056
1 2 3 4
Weights -0.232567 -0.324522 0.083382 1.109417
1 2 3 4
Weights -0.317592 -0.138386 0.242966 0.975475
1 2 3 4
Weights -0.323079 -0.276652 -0.020965 1.122775
1 2 3 4
Weights -0.404328 -0.298928 0.113517 1.366654
1 2 3 4
Weights -0.166521 -0.175999 -0.009062 0.866872
1 2 3 4
Weights -0.150404 -0.226929 -0.143243 1.104928
1 2 3 4
Weights -0.395317 -0.133005 0.228765 0.963427
1 2 3 4
Weights -0.108595 -0.559081 -0.228887 1.423607
1 2 3 4
Weights -0.258048 -0.198368 0.149805 0.877202
1 2 3 4
Weights -0.340664 -0.347332 0.072607 1.046796
1 2 3 4
Weights -0.302042 -0.175457 0.040932 0.956129
1 2 3 4
Weights -0.311973 -0.482334 0.171135 1.358719
1 2 3 4
Weights -0.228682 -0.094427 0.17503 0.804625
1 2 3 4
Weights -0.154986 -0.131374 -0.135478 0.877899
1 2 3 4
Weights -0.55848 -0.211006 0.141333 1.243892
1 2 3 4
Weights -0.302927 -0.174529 0.177339 1.011545
1 2 3 4
Weights -0.324097 -0.170451 0.047077 1.024583
1 2 3 4
Weights -0.175324 -0.094532 0.112775 0.644889
1 2 3 4
Weights -0.130712 -0.287028 -0.127803 0.964781
1 2 3 4
Weights -0.213665 -0.298149 -0.06254 1.118218
1 2 3 4
Weights -0.267259 -0.296289 0.048151 0.996228
1 2 3 4
Weights -0.218247 -0.186751 -0.047313 0.965547
1 2 3 4
Weights -0.203812 -0.493307 -0.014253 1.302158
1 2 3 4
Weights -0.363582 0.000971 0.036001 1.126228
1 2 3 4
Weights -0.272924 -0.20716 0.014268 1.085339
1 2 3 4
Weights -0.416238 -0.129601 0.1791 1.157749
1 2 3 4
Weights -0.370755 -0.240014 0.211766 1.094152
1 2 3 4
Weights -0.333391 -0.489412 0.235291 1.294201
1 2 3 4
Weights -0.152982 -0.365881 -0.128712 1.183256
1 2 3 4
Weights -0.149731 0.108303 -0.003068 0.777631
1 2 3 4
Weights -0.294242 -0.361362 -0.008924 1.182593
1 2 3 4
Weights -0.452954 -0.354153 -0.000716 1.412724
1 2 3 4
Weights -0.435833 -0.152051 0.059136 1.113647
1 2 3 4
Weights -0.146589 -0.20352 -0.026502 1.058092
1 2 3 4
Weights -0.39948 -0.131434 0.151125 1.202193
1 2 3 4
Weights -0.053142 -0.613228 -0.227163 1.351856
1 2 3 4
Weights -0.423082 -0.212643 0.128445 1.284101
1 2 3 4
Weights -0.114818 -0.028482 0.097807 0.730126
1 2 3 4
Weights -0.37514 0.019606 0.080199 0.959936
1 2 3 4
Weights -0.054587 -0.580287 -0.334073 1.449335
1 2 3 4
Weights -0.469568 -0.221699 0.131513 1.216095
1 2 3 4
Weights -0.2221 -0.336265 -0.067915 1.10508
1 2 3 4
Weights -0.177398 -0.39553 -0.048169 1.14469
1 2 3 4
Weights -0.225015 -0.171022 0.093907 0.868656
1 2 3 4
Weights -0.418384 -0.07138 0.196352 0.899803
1 2 3 4
Weights -0.253479 -0.270402 0.022476 1.199368
1 2 3 4
Weights -0.410781 -0.202973 -0.146081 1.312489
1 2 3 4
Weights -0.258406 -0.40289 0.072838 1.202044
1 2 3 4
Weights -0.347711 -0.388582 0.08134 1.253441
1 2 3 4
Weights -0.19537 -0.276061 -0.140146 1.182064
1 2 3 4
Weights -0.163758 -0.233689 0.007133 0.940679
1 2 3 4
Weights -0.487627 -0.251568 0.199415 1.187008
1 2 3 4
Weights -0.4529 -0.199021 0.214834 1.033477
1 2 3 4
Weights -0.130166 -0.124457 0.104732 0.75315
1 2 3 4
Weights -0.3036 -0.098094 0.191365 0.952435
1 2 3 4
Weights -0.022628 -0.577384 -0.403374 1.462676
1 2 3 4
Weights -0.197739 -0.121111 -0.139214 0.953968
1 2 3 4
Weights -0.350877 -0.112945 0.055797 1.129866
1 2 3 4
Weights -0.356566 -0.29432 0.124647 1.283835
1 2 3 4
Weights -0.173243 -0.393998 -0.097651 1.246876
1 2 3 4
Weights -0.370698 -0.098698 -0.010049 1.13789
1 2 3 4
Weights -0.353676 -0.252277 0.135857 1.043352
1 2 3 4
Weights -0.35678 -0.240043 0.266466 1.035289
1 2 3 4
Weights -0.326915 -0.202441 0.008954 1.154682
1 2 3 4
Weights -0.421389 -0.128509 0.239996 1.084016
1 2 3 4
Weights -0.339435 -0.177888 0.240999 0.895332
1 2 3 4
Weights -0.296557 -0.359581 0.046343 1.304793
1 2 3 4
Weights -0.1559 -0.223281 -0.061404 1.012689
1 2 3 4
Weights -0.386462 -0.316695 0.014769 1.217145
1 2 3 4
Weights -0.268748 -0.479957 -0.117594 1.434201
1 2 3 4
Weights -0.238867 -0.384663 -0.144396 1.292476
1 2 3 4
Weights -0.170002 -0.576598 -0.093961 1.398216
1 2 3 4
Weights -0.252166 -0.27876 -0.027946 1.021814
1 2 3 4
Weights -0.144706 -0.520075 -0.451069 1.68384
1 2 3 4
Weights -0.292685 -0.208789 0.095424 1.031332
1 2 3 4
Weights -0.176813 -0.425685 -0.07169 1.270562
1 2 3 4
Weights -0.225789 -0.287407 -0.007185 1.26664
1 2 3 4
Weights -0.26626 -0.486279 0.002721 1.238835
1 2 3 4
Weights -0.429989 0.144112 0.223623 0.460874
1 2 3 4
Weights -0.223262 -0.372198 0.120313 0.921667
1 2 3 4
Weights -0.231877 -0.308065 0.143159 1.091798
1 2 3 4
Weights -0.149466 -0.433773 -0.119208 1.17997
1 2 3 4
Weights -0.391847 -0.128953 0.199535 1.053434
1 2 3 4
Weights 0.029267 0.053253 0.199685 0.235743
1 2 3 4
Weights -0.190033 -0.29355 -0.003103 1.005489
1 2 3 4
Weights -0.179467 -0.673864 -0.309696 1.846925
1 2 3 4
Weights -0.27626 -0.250135 0.040335 1.081207
1 2 3 4
Weights -0.159907 -0.376315 0.001731 1.100112
1 2 3 4
Weights -0.31105 -0.137044 0.030231 0.962901
1 2 3 4
Weights -0.356102 -0.399541 0.010562 1.316053
1 2 3 4
Weights -0.405486 -0.237289 0.123835 1.296129
1 2 3 4
Weights -0.229311 -0.153109 -0.017712 0.904762
1 2 3 4
Weights -0.272538 -0.202073 0.100517 0.848761
1 2 3 4
Weights -0.331104 -0.217083 0.058276 1.272358
1 2 3 4
Weights -0.206314 -0.332562 0.049634 1.118021
1 2 3 4
Weights -0.332384 -0.255239 0.053193 1.102343
1 2 3 4
Weights -0.188115 -0.357511 -0.061577 1.0715
1 2 3 4
Weights -0.260391 -0.015068 0.233146 0.520979
1 2 3 4
Weights -0.256633 -0.398403 -0.085763 1.329264
1 2 3 4
Weights -0.194891 -0.321688 0.047445 1.135641
1 2 3 4
Weights -0.436569 -0.429035 0.082711 1.52536
1 2 3 4
Weights -0.162813 -0.369496 -0.273355 1.314074
1 2 3 4
Weights -0.398874 -0.320414 0.249782 1.131594
1 2 3 4
Weights -0.462014 -0.403753 0.216654 1.334035
1 2 3 4
Weights -0.18981 -0.379547 -0.02882 1.090872
1 2 3 4
Weights -0.186325 -0.07971 0.178652 0.557234
1 2 3 4
Weights -0.295917 -0.181266 0.096725 1.0853
1 2 3 4
Weights -0.307834 -0.32414 0.145915 0.957835
1 2 3 4
Weights -0.284321 -0.331359 0.011331 1.184446
1 2 3 4
Weights -0.15117 0.02876 0.06178 0.844824
1 2 3 4
Weights -0.331243 -0.541207 0.165093 1.418255
1 2 3 4
Weights -0.314553 -0.351133 0.085089 1.186322
1 2 3 4
Weights -0.201828 -0.466763 -0.130565 1.421952
1 2 3 4
Weights -0.160122 -0.067523 -0.035453 0.891982
1 2 3 4
Weights -0.242202 -0.280966 0.063933 1.080952
1 2 3 4
Weights -0.134322 -0.032171 0.104604 0.598223
1 2 3 4
Weights -0.378768 -0.075947 0.152636 1.066866
1 2 3 4
Weights -0.429981 -0.40026 0.148677 1.334206
1 2 3 4
Weights -0.2505 -0.523419 -0.164244 1.477005
1 2 3 4
Weights -0.216871 -0.472101 -0.037819 1.266283
1 2 3 4
Weights -0.405796 -0.220811 0.081911 1.235288
1 2 3 4
Weights -0.349022 -0.287218 -0.004756 1.205336
1 2 3 4
Weights -0.237578 -0.453518 0.049684 1.333969
1 2 3 4
Weights -0.080173 0.017702 0.087877 0.532185
1 2 3 4
Weights -0.233133 -0.03499 0.128556 0.887071
1 2 3 4
Weights -0.333636 -0.226801 -0.043511 1.097973
1 2 3 4
Weights -0.292408 -0.424089 0.043035 1.383175
1 2 3 4
Weights -0.449906 -0.235526 0.047864 1.22181
1 2 3 4
Weights -0.423416 -0.344598 0.151941 1.208705
1 2 3 4
Weights -0.329438 -0.112035 0.127353 1.057383
1 2 3 4
Weights -0.25241 -0.483346 -0.003837 1.210313
1 2 3 4
Weights -0.351271 -0.172316 0.162273 1.056534
1 2 3 4
Weights -0.130082 -0.2309 -0.078298 0.842488
1 2 3 4
Weights -0.176323 -0.159909 0.011229 0.796826
1 2 3 4
Weights -0.487049 -0.386883 0.196998 1.26608
1 2 3 4
Weights -0.185651 -0.305635 0.15844 0.99877
1 2 3 4
Weights -0.361647 -0.091857 0.12062 1.0117
1 2 3 4
Weights -0.098267 -0.565421 -0.199299 1.413273
1 2 3 4
Weights -0.330497 -0.193788 0.004629 1.056931
1 2 3 4
Weights -0.334639 -0.260783 -0.06296 1.22818
1 2 3 4
Weights -0.287108 -0.280461 0.077396 1.028166
1 2 3 4
Weights -0.481459 -0.17678 0.193579 1.103477
1 2 3 4
Weights -0.236711 -0.02284 0.095972 0.939408
1 2 3 4
Weights -0.161807 -0.263496 -0.173873 1.053658
1 2 3 4
Weights -0.194682 -0.222465 0.100667 0.977854
1 2 3 4
Weights -0.363954 -0.100862 0.106598 1.071701
1 2 3 4
Weights -0.245373 -0.299881 -0.025428 1.243773
1 2 3 4
Weights -0.377757 -0.345721 0.055092 1.403795
1 2 3 4
Weights -0.277474 -0.313353 0.107057 1.052069
1 2 3 4
Weights -0.071705 -0.382327 -0.146563 1.123864
1 2 3 4
Weights -0.178345 -0.457407 -0.146279 1.304785
1 2 3 4
Weights -0.267859 -0.207242 0.087473 1.045871
1 2 3 4
Weights -0.100429 0.072777 0.13772 0.641883
1 2 3 4
Weights -0.358158 -0.222896 0.141363 0.993853
1 2 3 4
Weights -0.365784 -0.373094 0.111998 1.18565
1 2 3 4
Weights -0.308799 -0.014994 0.237858 0.901624
1 2 3 4
Weights -0.030329 -0.476631 -0.372476 1.388301
1 2 3 4
Weights -0.321788 -0.171022 0.019501 1.106834
1 2 3 4
Weights -0.429262 -0.357679 0.168667 1.402751
1 2 3 4
Weights -0.042545 -0.44125 -0.334871 1.272448
1 2 3 4
Weights -0.198771 -0.077394 0.131379 0.706697
1 2 3 4
Weights -0.380331 -0.393884 0.167093 1.283947
1 2 3 4
Weights -0.228139 -0.07579 0.140962 0.643585
1 2 3 4
Weights -0.059683 -0.504606 -0.214481 1.34711
1 2 3 4
Weights -0.245329 -0.301082 -0.058451 1.122103
1 2 3 4
Weights -0.283 -0.198123 0.064879 0.952468
1 2 3 4
Weights -0.312947 -0.393224 0.129618 1.265117
1 2 3 4
Weights -0.243751 -0.366383 -0.176222 1.292383
1 2 3 4
Weights -0.523145 -0.19018 0.145578 1.228956
1 2 3 4
Weights -0.328134 -0.314985 -0.182132 1.439225
1 2 3 4
Weights -0.033613 -0.561027 -0.262222 1.370064
1 2 3 4
Weights -0.030224 -0.475836 -0.410859 1.428282
1 2 3 4
Weights -0.205613 -0.128413 -0.060502 0.977563
1 2 3 4
Weights -0.318343 -0.236131 0.063089 1.010486
1 2 3 4
Weights -0.232561 -0.36047 -0.037483 1.116213
1 2 3 4
Weights -0.233069 -0.415947 -0.111197 1.312369
1 2 3 4
Weights -0.298816 -0.282989 -0.013754 1.214967
1 2 3 4
Weights -0.238994 -0.046934 0.230338 0.958454
1 2 3 4
Weights -0.415027 -0.194347 0.202808 1.233091
1 2 3 4
Weights -0.099569 -0.118422 0.102745 0.729887
1 2 3 4
Weights -0.189505 -0.13685 0.041396 0.840618
1 2 3 4
Weights -0.242103 -0.31126 -0.070258 1.149838
1 2 3 4
Weights -0.293747 -0.496178 -0.066736 1.419591
1 2 3 4
Weights -0.197408 -0.658362 0.006112 1.560772
1 2 3 4
Weights -0.195972 0.026407 0.120602 0.512136
1 2 3 4
Weights -0.423474 -0.246574 0.181731 1.155382
1 2 3 4
Weights -0.281971 -0.514091 -0.002181 1.475532
1 2 3 4
Weights -0.371849 -0.139629 -0.095299 1.321674
1 2 3 4
Weights -0.315288 -0.433173 0.0946 1.399501
1 2 3 4
Weights -0.315263 -0.332764 0.230147 0.899266
1 2 3 4
Weights -0.222477 -0.115676 0.077364 0.845741
1 2 3 4
Weights -0.161349 0.088521 0.145707 0.732345
1 2 3 4
Weights -0.217616 -0.222506 0.002339 1.034525
1 2 3 4
Weights -0.382169 -0.185081 0.251786 0.995707
1 2 3 4
Weights -0.543773 -0.136315 0.106588 1.14705
1 2 3 4
Weights -0.17219 -0.116277 0.010935 0.827411
1 2 3 4
Weights -0.199638 -0.158019 0.047707 1.012058
1 2 3 4
Weights -0.171967 -0.498114 -0.170675 1.332967
1 2 3 4
Weights -0.120457 -0.560214 -0.160542 1.384227
1 2 3 4
Weights -0.145112 -0.096661 0.143077 0.857995
1 2 3 4
Weights -0.212163 -0.669942 -0.069897 1.52772
Predicting t+4...
t+1 t+2 t+3 t+4
R2 0.962349 0.928844 0.874857 0.814177
RMSE 70.726932 97.747881 130.354863 159.746639
MSE 5002.298976 9554.648148 16992.390247 25518.988652
MAE 0.352292 0.488578 0.615579 0.736701
MAPE 35.229176 48.857792 61.557867 73.670140
MPE -19.865985 -30.286623 -39.843389 -49.336384
CPU times: user 59.3 s, sys: 1.36 s, total: 1min
Wall time: 1min
In [80]:
plot_diff_along_time_per_t(X_test, art_preds_s)
In [81]:
j, s = 2, 100
plot_bispecific(X_test, baseline_preds, art_preds, art_preds_s, order, limit_t, j, s)
In [82]:
plot_qualitative_analysis(art_preds, X_test, limit_t, order, subway_stations, del_hours)
In [83]:
plot_qualitative_analysis(art_preds_s, X_test, limit_t, order, subway_stations, del_hours)
In [84]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 'None'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(art_scores).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR per t")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="General baseline")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of General baseline and AR model, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);
In [85]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 's'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(art_scores_s).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR per station per t")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="Baseline per station")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

Compute and Compare

In [86]:
fig, ax = plt.subplots(1, figsize=(16, 6))
ax.set_prop_cycle(color=['crimson', 'teal', 'b', 'darkgoldenrod'])
x = range(1, limit_t+1)
baseline_scores = df_baseline_scores.loc['RMSE'].values.repeat(limit_t).reshape(-1, limit_t).T
model_scores = np.vstack((np.array(art_scores).T[1], np.array(art_scores_s).T[1])).T
baselineObjects = plt.plot(x, baseline_scores, linewidth=3)
labels = ["General baseline", "Baseline per station", "AR per t", "AR per station per t"]
arlineObjects = plt.plot(x, model_scores, linewidth=3)

# ['D', '*', '|', 'X']
# labels = ["Full baseline", "Baseline per station", "Baseline per day",
#                    "Baseline per station and day", "Full AR", "AR per station"]
for i, m in zip(range(4), ['D', '*']):
    ax = plt.scatter(x, baseline_scores[:, i], marker=m, s=100)

for i, m in zip(range(2), ['D', '*']):
    ax = plt.scatter(x, model_scores[:, i], marker=m, s=100)
    
plt.legend(baselineObjects+arlineObjects, labels, prop={'size': 15})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

AR per t + Baseline

In [87]:
def baseline_sub(X, baseline):
    
    return X.apply(lambda x: x - baseline.iloc[0], axis=(1, 2))
    
def baseline_add(X, baseline):
    
    return X.apply(lambda x: x + baseline.iloc[0], axis=(1, 2))
    
In [88]:
order, limit_t = 4, 4
In [89]:
# Minus Baseline per station
Xb_train = baseline_sub(X_train, baseline_preds[1])
Xb_test = baseline_sub(X_test, baseline_preds[1])

# Minus Baseline per station
Xb_train_s = baseline_sub(X_train, baseline_preds[1])
Xb_test_s = baseline_sub(X_test, baseline_preds[1])
In [90]:
Xb_min_max_s = Xb_train_s.apply(lambda x: (x.min().min(), x.max().max()), axis=(0, 2))
In [91]:
Xbn_train_s = Xb_train_s.apply(lambda x: a + ((x - x.min().min()) * (b - a)) / (x.max().max() - x.min().min()), 
                               axis=(0, 2)).transpose(2, 0, 1)
# Normalize Xb_test
Xbn_test_s = pd.Panel(np.array(list(map(lambda station_id: normalize(
    Xb_test_s, Xb_min_max_s, station_id, a=a, b=b), 
                                        Xb_test_s.transpose(1, 0, 2)))).transpose(2, 0, 1),
                      items=list(Xb_test_s.items),
                      major_axis=subway_stations,
                      minor_axis=generate_times("15min")[(del_hours * 4):])

# Denormalize Xb_test
Xbdn_test_s = pd.Panel(np.array(list(map(lambda station_id: denormalize(
    Xbn_test_s, Xb_min_max_s, station_id, a=a, b=b), 
                                        Xb_test_s.transpose(1, 0, 2)))).transpose(2, 0, 1),
                      items=list(Xb_test_s.items),
                      major_axis=subway_stations,
                      minor_axis=generate_times("15min")[(del_hours * 4):])
In [92]:
%%time
artb_preds, artb_scores = art_plot_results(None, order=order, limit_t=limit_t, X_train=Xbn_train_s,
                                           X_test=Xbn_test_s, X_min_max=Xb_min_max_s, a=a, b=b)

for t in range(limit_t):
    artb_preds[t] = baseline_add(artb_preds[t], baseline_preds[1])
Predicting...
Fitting t+1...
1 2 3 4
Weights -0.02997 -0.007437 0.287751 0.695926
Predicting t+1...
Fitting t+2...
1 2 3 4
Weights -0.152397 0.056112 0.229684 0.768109
Predicting t+2...
Fitting t+3...
1 2 3 4
Weights -0.132739 -0.083579 0.309297 0.760745
Predicting t+3...
Fitting t+4...
1 2 3 4
Weights -0.123808 -0.07186 0.165967 0.836236
Predicting t+4...
t+1 t+2 t+3 t+4
R2 8.459523e-01 7.619975e-01 6.396674e-01 5.087204e-01
RMSE 5.721493e+01 7.158484e+01 8.866352e+01 1.042114e+02
MSE 3.273548e+03 5.124389e+03 7.861220e+03 1.086001e+04
MAE -2.346982e+10 -2.762460e+10 -3.807056e+10 -4.962539e+10
MAPE -2.346982e+12 -2.762460e+12 -3.807056e+12 -4.962539e+12
MPE 1.559500e+12 2.298167e+12 2.731662e+12 2.367798e+12
CPU times: user 7.96 s, sys: 5.17 s, total: 13.1 s
Wall time: 13.3 s
In [93]:
plot_diff_along_time(X_test, artb_preds)
In [94]:
%%time
artb_preds_s, artb_scores_s = art_plot_results("s", order, limit_t,  
                                               X_train=Xbn_train_s, 
                                               X_test=Xbn_test_s,
                                               X_min_max=Xb_min_max_s, a=a, b=b)
for t in range(limit_t):
    artb_preds_s[t] = baseline_add(artb_preds_s[t], baseline_preds[1]) 
Predicting...
Fitting t+1...
1 2 3 4
Weights -0.151558 0.102607 0.322026 0.601723
1 2 3 4
Weights 0.041706 0.073316 0.191044 0.529377
1 2 3 4
Weights -0.135022 -0.130593 0.250507 0.90533
1 2 3 4
Weights -0.031072 -0.044133 0.389985 0.573815
1 2 3 4
Weights 0.142115 0.090334 0.160761 0.562643
1 2 3 4
Weights -0.110814 -0.059495 0.192758 0.865118
1 2 3 4
Weights -0.028911 0.042138 0.312355 0.600317
1 2 3 4
Weights -0.161928 -0.029374 0.258065 0.82988
1 2 3 4
Weights -0.133476 -0.039783 0.206957 0.87954
1 2 3 4
Weights -0.082662 -0.161168 0.109018 1.038095
1 2 3 4
Weights -0.081539 -0.122675 0.520434 0.556939
1 2 3 4
Weights 0.008419 0.023854 0.23796 0.616796
1 2 3 4
Weights 0.330964 -0.107073 0.289914 0.401104
1 2 3 4
Weights 0.030721 -0.036177 0.370298 0.566179
1 2 3 4
Weights 0.028146 -0.042285 0.246306 0.710248
1 2 3 4
Weights -0.065209 -0.070725 0.198175 0.844521
1 2 3 4
Weights -0.064546 0.00717 0.237114 0.734168
1 2 3 4
Weights 0.060565 0.15032 0.233907 0.438679
1 2 3 4
Weights -0.074263 0.051188 0.314403 0.625617
1 2 3 4
Weights -0.042034 -0.192672 0.257845 0.878572
1 2 3 4
Weights -0.127013 -0.018267 0.273906 0.765638
1 2 3 4
Weights -0.114608 -0.058098 0.278996 0.774814
1 2 3 4
Weights -0.123301 -0.128929 0.234812 0.900728
1 2 3 4
Weights -0.174497 0.049936 0.039826 1.01033
1 2 3 4
Weights 0.006991 -0.06234 0.357582 0.636177
1 2 3 4
Weights 0.073911 -0.02157 0.273259 0.587016
1 2 3 4
Weights -0.105408 0.115143 0.31034 0.572212
1 2 3 4
Weights 0.064441 -0.022938 0.331934 0.54844
1 2 3 4
Weights 0.0006 0.021808 0.285301 0.566521
1 2 3 4
Weights 0.09071 -0.205097 0.288281 0.744782
1 2 3 4
Weights -0.123155 -0.144028 0.388214 0.8205
1 2 3 4
Weights -0.077814 -0.141947 0.209549 0.908939
1 2 3 4
Weights -0.033857 -0.021773 0.21141 0.750786
1 2 3 4
Weights -0.021585 -0.094092 0.285667 0.746826
1 2 3 4
Weights 0.232502 0.103579 0.068336 0.484251
1 2 3 4
Weights 0.034626 -0.059489 0.310781 0.653544
1 2 3 4
Weights 0.024371 0.091974 0.170086 0.609767
1 2 3 4
Weights -0.055857 0.012771 0.238266 0.6705
1 2 3 4
Weights -0.025987 -0.138191 0.244551 0.854979
1 2 3 4
Weights -0.087056 -0.183707 0.337376 0.867254
1 2 3 4
Weights 0.163586 0.129079 0.230739 0.34979
1 2 3 4
Weights 0.142299 0.10757 0.043735 0.590122
1 2 3 4
Weights -0.000269 0.154696 0.240995 0.520317
1 2 3 4
Weights -0.038769 0.035733 0.217108 0.662544
1 2 3 4
Weights -0.073953 0.019322 0.30043 0.666164
1 2 3 4
Weights -0.064199 -0.304735 0.339604 0.931938
1 2 3 4
Weights -0.009867 0.033798 0.205712 0.666018
1 2 3 4
Weights -0.115888 -0.042485 0.228415 0.859431
1 2 3 4
Weights -0.103669 -0.086075 0.409039 0.695837
1 2 3 4
Weights 0.279947 0.1414 0.120786 0.378696
1 2 3 4
Weights -0.056807 -0.076467 0.151931 0.913383
1 2 3 4
Weights 0.024659 0.052972 0.301532 0.496113
1 2 3 4
Weights -0.044313 0.03778 0.28864 0.64172
1 2 3 4
Weights 0.038347 -0.174266 0.255993 0.817104
1 2 3 4
Weights -0.066896 0.222874 0.372744 0.342339
1 2 3 4
Weights 0.146284 0.231238 0.068217 0.409804
1 2 3 4
Weights -0.170109 -0.0242 0.248691 0.864876
1 2 3 4
Weights 0.131925 -0.012648 0.170191 0.596569
1 2 3 4
Weights -0.01406 -0.227056 0.194473 0.92864
1 2 3 4
Weights 0.019684 0.026681 0.265989 0.588421
1 2 3 4
Weights -0.086181 -0.052615 0.175391 0.880703
1 2 3 4
Weights 0.19486 0.109211 0.124588 0.456077
1 2 3 4
Weights 0.209718 0.053946 0.233974 0.430801
1 2 3 4
Weights -0.081314 0.114184 0.357417 0.493347
1 2 3 4
Weights 0.146211 0.037265 -0.06085 0.782657
1 2 3 4
Weights -0.103496 -0.128212 0.146924 0.991409
1 2 3 4
Weights -0.133481 0.047657 0.314221 0.629858
1 2 3 4
Weights -0.159652 -0.103037 0.152388 1.032889
1 2 3 4
Weights 0.098934 0.042118 0.124665 0.659295
1 2 3 4
Weights -0.100917 -0.141201 0.13915 0.980194
1 2 3 4
Weights -0.006025 0.16342 0.32425 0.38821
1 2 3 4
Weights -0.157787 0.00592 0.45189 0.610002
1 2 3 4
Weights -0.087775 -0.022848 0.096467 0.934897
1 2 3 4
Weights -0.093056 0.050131 0.3961 0.488717
1 2 3 4
Weights 0.138403 0.108002 0.246236 0.410265
1 2 3 4
Weights 0.27211 0.074957 0.064607 0.323308
1 2 3 4
Weights -0.020268 -0.112834 0.485786 0.58877
1 2 3 4
Weights -0.105679 -0.006017 0.375809 0.666016
1 2 3 4
Weights -0.165855 -0.0314 0.306357 0.787185
1 2 3 4
Weights 0.035608 0.102922 0.224701 0.481706
1 2 3 4
Weights -0.059948 0.038751 0.301804 0.602339
1 2 3 4
Weights -0.057999 -0.211968 0.356136 0.813837
1 2 3 4
Weights -0.090209 -0.002306 0.051968 0.938411
1 2 3 4
Weights 0.095461 -0.004522 0.055382 0.753705
1 2 3 4
Weights -0.145007 -0.06302 0.227848 0.883657
1 2 3 4
Weights -0.091667 -0.152019 0.306845 0.814158
1 2 3 4
Weights -0.174102 0.002888 0.369502 0.713418
1 2 3 4
Weights -0.049197 0.060146 0.218152 0.66191
1 2 3 4
Weights -0.138325 -0.084012 0.334861 0.786313
1 2 3 4
Weights -0.006345 0.014071 0.351431 0.535019
1 2 3 4
Weights -0.101724 -0.17953 0.205889 0.982587
1 2 3 4
Weights -0.081718 -0.219656 0.331621 0.872073
1 2 3 4
Weights -0.098332 0.004909 0.299049 0.674658
1 2 3 4
Weights -0.014306 -0.050175 0.234944 0.768784
1 2 3 4
Weights -0.087497 -0.097828 0.355432 0.721445
1 2 3 4
Weights -0.034128 -0.085276 0.292345 0.747642
1 2 3 4
Weights -0.163516 -0.077027 0.213886 0.917957
1 2 3 4
Weights -0.211266 -0.156702 0.44689 0.823182
1 2 3 4
Weights -0.073189 -0.062636 0.372848 0.686651
1 2 3 4
Weights -0.053298 -0.261212 0.45872 0.769659
1 2 3 4
Weights -0.042315 -0.082144 0.408387 0.633205
1 2 3 4
Weights 0.10406 0.006269 0.327012 0.493616
1 2 3 4
Weights -0.09996 0.011771 0.248274 0.742215
1 2 3 4
Weights -0.032106 -0.037823 0.230356 0.782455
1 2 3 4
Weights -0.002594 0.053935 0.148068 0.705889
1 2 3 4
Weights -0.071819 0.037002 0.174829 0.771685
1 2 3 4
Weights 0.027108 0.073774 0.237519 0.527074
1 2 3 4
Weights -0.17575 -0.095065 0.319119 0.850774
1 2 3 4
Weights -0.003475 0.084535 0.324177 0.488881
1 2 3 4
Weights -0.171262 0.006962 0.434752 0.606821
1 2 3 4
Weights -0.037645 0.095093 0.218427 0.609201
1 2 3 4
Weights -0.054119 -0.172073 0.467368 0.70887
1 2 3 4
Weights 0.07251 0.056277 0.324694 0.427193
1 2 3 4
Weights -0.041282 0.015196 0.165544 0.721604
1 2 3 4
Weights -0.109148 0.050325 0.248617 0.733931
1 2 3 4
Weights -0.019754 0.035493 0.285094 0.627787
1 2 3 4
Weights -0.074027 0.008725 0.251215 0.711182
1 2 3 4
Weights 0.046013 0.071709 0.262896 0.449969
1 2 3 4
Weights -0.086061 -0.066091 0.275091 0.738925
1 2 3 4
Weights 0.093937 -0.164092 0.247882 0.741627
1 2 3 4
Weights -0.11915 0.057285 0.265082 0.671072
1 2 3 4
Weights 0.011049 -0.099956 0.274566 0.708292
1 2 3 4
Weights -0.10023 -0.123662 0.365368 0.778231
1 2 3 4
Weights 0.155425 0.210131 0.082626 0.503319
1 2 3 4
Weights -0.072049 -0.010104 0.275715 0.724749
1 2 3 4
Weights 0.067417 0.070596 0.22334 0.566802
1 2 3 4
Weights 0.009285 -0.001087 0.3675 0.553527
1 2 3 4
Weights -0.05797 -0.13958 0.502899 0.634769
1 2 3 4
Weights -0.068474 -0.130223 0.320702 0.78545
1 2 3 4
Weights 0.214124 0.178319 0.066477 0.457221
1 2 3 4
Weights -0.126699 -0.02769 0.25708 0.789364
1 2 3 4
Weights -0.088993 -0.163904 0.337587 0.849452
1 2 3 4
Weights -0.057232 0.075208 0.136122 0.761927
1 2 3 4
Weights 0.102179 -0.146909 0.304346 0.630047
1 2 3 4
Weights -0.039102 0.030124 0.214324 0.753349
1 2 3 4
Weights -0.15802 -0.123562 0.313123 0.855601
1 2 3 4
Weights 0.025471 -0.012048 0.264894 0.662349
1 2 3 4
Weights 0.093436 0.080119 0.202577 0.519442
1 2 3 4
Weights 0.058889 -0.006286 0.159288 0.705727
1 2 3 4
Weights -0.17844 -0.073602 0.244737 0.902646
1 2 3 4
Weights -0.051088 0.029965 0.240842 0.700386
1 2 3 4
Weights -0.135808 -0.01957 0.30163 0.732022
1 2 3 4
Weights -0.144302 -0.018847 0.305812 0.755223
1 2 3 4
Weights 0.097015 0.011277 0.23231 0.54798
1 2 3 4
Weights -0.036868 0.075414 0.297563 0.574904
1 2 3 4
Weights -0.039481 -0.047052 0.275727 0.748387
1 2 3 4
Weights -0.1199 0.002019 0.159226 0.874048
1 2 3 4
Weights -0.040132 -0.14469 0.41152 0.700602
1 2 3 4
Weights -0.188986 -0.033629 0.426532 0.707989
1 2 3 4
Weights -0.051117 -0.069268 0.202685 0.834437
1 2 3 4
Weights 0.148618 -0.110545 0.32427 0.538748
1 2 3 4
Weights -0.074811 0.046766 0.358784 0.580302
1 2 3 4
Weights -0.093824 0.056659 0.361218 0.586619
1 2 3 4
Weights 0.037503 0.046425 0.320908 0.433889
1 2 3 4
Weights 0.106388 0.060823 0.227957 0.538444
1 2 3 4
Weights -0.196461 -0.069495 0.232181 0.924986
1 2 3 4
Weights -0.065101 -0.009502 0.186195 0.776962
1 2 3 4
Weights -0.018101 0.106673 0.184192 0.668408
1 2 3 4
Weights 0.055547 -0.153588 0.283512 0.76108
1 2 3 4
Weights -0.080843 -0.107799 0.302413 0.787889
1 2 3 4
Weights -0.086512 0.076501 0.162654 0.749818
1 2 3 4
Weights -0.146135 0.067748 0.311458 0.656482
1 2 3 4
Weights 0.034526 -0.004364 0.361496 0.536057
1 2 3 4
Weights -0.080265 -0.018223 0.247781 0.74828
1 2 3 4
Weights 0.042772 0.066479 0.249441 0.583874
1 2 3 4
Weights 0.017642 0.121519 0.338542 0.413955
1 2 3 4
Weights -0.118952 0.004571 0.30575 0.727382
1 2 3 4
Weights -0.070599 0.024016 0.240031 0.694749
1 2 3 4
Weights -0.144538 0.010519 0.273951 0.769943
1 2 3 4
Weights -0.208162 -0.033623 0.344006 0.809296
1 2 3 4
Weights -0.186725 -0.031015 0.274562 0.847297
1 2 3 4
Weights -0.230601 -0.087063 0.453658 0.764597
1 2 3 4
Weights -0.063967 -0.017967 0.253571 0.705947
1 2 3 4
Weights -0.171025 -0.074456 0.162063 1.011314
1 2 3 4
Weights 0.03348 0.004785 0.217136 0.644447
1 2 3 4
Weights -0.126152 -0.03858 0.340357 0.737439
1 2 3 4
Weights -0.055131 -0.061155 0.288083 0.783386
1 2 3 4
Weights -0.17896 -0.060321 0.444982 0.689485
1 2 3 4
Weights 0.017322 0.233299 -0.482099 1.0497
1 2 3 4
Weights -0.146121 0.025878 0.430259 0.562335
1 2 3 4
Weights 0.167569 -0.178639 0.500058 0.447829
1 2 3 4
Weights -0.177532 -0.037077 0.338651 0.756292
1 2 3 4
Weights 0.009765 0.062895 0.289196 0.543815
1 2 3 4
Weights 0.131413 0.014758 -0.017518 0.4536
1 2 3 4
Weights 0.01678 -0.098294 0.318857 0.655304
1 2 3 4
Weights -0.10864 -0.221857 0.164873 1.118532
1 2 3 4
Weights -0.066749 0.026039 0.265614 0.684496
1 2 3 4
Weights -0.050241 -0.118996 0.398607 0.684868
1 2 3 4
Weights 0.049827 0.033433 0.136562 0.684619
1 2 3 4
Weights -0.184178 0.001392 0.323188 0.771285
1 2 3 4
Weights -0.029428 0.016325 0.207542 0.745107
1 2 3 4
Weights -0.067862 0.062636 0.16706 0.726673
1 2 3 4
Weights -0.046283 0.06591 0.206534 0.663517
1 2 3 4
Weights 0.00742 0.054824 0.197227 0.691347
1 2 3 4
Weights -0.025602 -0.094326 0.342466 0.697986
1 2 3 4
Weights -0.101536 0.032612 0.297162 0.66986
1 2 3 4
Weights -0.156677 -0.004 0.347122 0.689922
1 2 3 4
Weights 0.098101 0.084755 0.063943 0.52841
1 2 3 4
Weights -0.153657 -0.053952 0.276009 0.844314
1 2 3 4
Weights -0.112273 -0.004811 0.36563 0.667836
1 2 3 4
Weights -0.059322 -0.146441 0.338474 0.818822
1 2 3 4
Weights -0.109553 -0.094701 0.172907 0.933739
1 2 3 4
Weights -0.048359 -0.058952 0.396922 0.638556
1 2 3 4
Weights -0.014973 -0.163251 0.398017 0.726214
1 2 3 4
Weights -0.168255 -0.014452 0.351568 0.708619
1 2 3 4
Weights 0.046919 0.095794 0.226101 0.433765
1 2 3 4
Weights 0.097505 -0.056296 0.257258 0.633515
1 2 3 4
Weights -0.136972 0.038756 0.463735 0.513926
1 2 3 4
Weights -0.112959 -0.053637 0.313398 0.759777
1 2 3 4
Weights 0.352551 0.110278 0.065152 0.400091
1 2 3 4
Weights -0.015293 -0.318979 0.495257 0.790883
1 2 3 4
Weights -0.04541 -0.081627 0.361356 0.69198
1 2 3 4
Weights -0.134886 -0.112043 0.322206 0.860461
1 2 3 4
Weights 0.042781 -0.020693 0.197421 0.69008
1 2 3 4
Weights 0.03872 -0.138565 0.358928 0.652774
1 2 3 4
Weights 0.116964 0.135352 0.137577 0.450523
1 2 3 4
Weights 0.066153 0.044411 0.224366 0.574778
1 2 3 4
Weights -0.113432 -0.092841 0.401302 0.739097
1 2 3 4
Weights -0.181637 -0.057896 0.308046 0.851821
1 2 3 4
Weights -0.070598 -0.189338 0.400637 0.771367
1 2 3 4
Weights -0.074021 0.02702 0.248369 0.711709
1 2 3 4
Weights -0.125641 0.004056 0.255887 0.768467
1 2 3 4
Weights -0.056027 -0.171023 0.445345 0.717427
1 2 3 4
Weights 0.061299 0.087747 0.095285 0.566967
1 2 3 4
Weights 0.175714 -0.010507 0.256409 0.512148
1 2 3 4
Weights -0.07689 0.003517 0.147879 0.813817
1 2 3 4
Weights -0.006247 -0.217338 0.37197 0.799256
1 2 3 4
Weights -0.142621 0.036745 0.253994 0.763749
1 2 3 4
Weights -0.185144 0.044118 0.38992 0.669736
1 2 3 4
Weights -0.00568 0.024929 0.275301 0.650002
1 2 3 4
Weights -0.151996 -0.07567 0.36204 0.750971
1 2 3 4
Weights -0.001826 0.046604 0.277581 0.614327
1 2 3 4
Weights 0.007121 -0.156173 0.225985 0.772841
1 2 3 4
Weights 0.023434 0.009742 0.189407 0.663888
1 2 3 4
Weights -0.183734 -0.008558 0.453393 0.658555
1 2 3 4
Weights 0.094547 -0.102354 0.421323 0.506197
1 2 3 4
Weights -0.019632 0.096522 0.199619 0.646599
1 2 3 4
Weights -0.152482 -0.127711 0.325126 0.862974
1 2 3 4
Weights -0.111317 0.014438 0.214141 0.787043
1 2 3 4
Weights -0.041298 -0.115159 0.252287 0.82968
1 2 3 4
Weights -0.076613 0.000842 0.336061 0.62634
1 2 3 4
Weights -0.087171 0.04346 0.373342 0.576088
1 2 3 4
Weights 0.20186 -0.001082 0.156539 0.572299
1 2 3 4
Weights -0.052946 -0.071253 0.242287 0.768434
1 2 3 4
Weights -0.003712 -0.023749 0.354397 0.594607
1 2 3 4
Weights 0.049447 0.058464 0.173254 0.623336
1 2 3 4
Weights -0.061737 -0.09824 0.254984 0.818112
1 2 3 4
Weights -0.044583 -0.140443 0.361861 0.772865
1 2 3 4
Weights -0.111793 0.012124 0.380587 0.631838
1 2 3 4
Weights -0.064076 -0.139109 0.400089 0.704795
1 2 3 4
Weights -0.170851 -0.024113 0.273409 0.816172
1 2 3 4
Weights 0.002585 0.033135 0.228791 0.645988
1 2 3 4
Weights 0.109373 0.089104 0.113537 0.592631
1 2 3 4
Weights -0.055323 0.049956 0.297245 0.617811
1 2 3 4
Weights -0.150545 0.014271 0.376916 0.667206
1 2 3 4
Weights 0.105559 0.098751 0.197886 0.500678
1 2 3 4
Weights -0.105709 -0.166715 0.268138 0.91009
1 2 3 4
Weights 0.018426 0.073049 0.166672 0.669638
1 2 3 4
Weights 0.056508 -0.133286 0.357766 0.66337
1 2 3 4
Weights -0.150546 -0.062469 0.189249 0.915514
1 2 3 4
Weights 0.031289 0.066664 0.276832 0.482444
1 2 3 4
Weights -0.044703 -0.073223 0.378138 0.669938
1 2 3 4
Weights 0.061381 0.083409 0.095808 0.588907
1 2 3 4
Weights -0.070719 -0.204416 0.341202 0.85395
1 2 3 4
Weights -0.119952 -0.028597 0.2173 0.8179
1 2 3 4
Weights -0.039813 -0.002958 0.315426 0.624105
1 2 3 4
Weights -0.053681 -0.096203 0.429209 0.656862
1 2 3 4
Weights -0.125434 -0.079634 0.237113 0.874122
1 2 3 4
Weights -0.112289 0.063559 0.286518 0.693465
1 2 3 4
Weights -0.086079 -0.101565 0.15655 0.968775
1 2 3 4
Weights -0.133643 -0.145532 0.338249 0.844701
1 2 3 4
Weights -0.124586 -0.123363 0.175634 0.980445
1 2 3 4
Weights -0.062992 0.110964 0.180232 0.671907
1 2 3 4
Weights -0.095665 0.022676 0.318135 0.648432
1 2 3 4
Weights -0.10493 -0.070022 0.392863 0.674295
1 2 3 4
Weights -0.160011 -0.043404 0.264103 0.845823
1 2 3 4
Weights -0.097158 0.057196 0.174167 0.780712
1 2 3 4
Weights -0.058491 0.041906 0.31307 0.680826
1 2 3 4
Weights 0.099661 0.021931 0.222917 0.616257
1 2 3 4
Weights -0.012176 0.096964 0.236719 0.538271
1 2 3 4
Weights 0.075484 -0.060216 0.196788 0.6686
1 2 3 4
Weights -0.1067 -0.042557 0.248283 0.799475
1 2 3 4
Weights -0.215974 -0.021202 0.344308 0.8114
1 2 3 4
Weights -0.096942 -0.253794 0.463976 0.833064
1 2 3 4
Weights 0.018116 0.169106 0.247754 0.354267
1 2 3 4
Weights 0.01466 -0.04532 0.286857 0.649215
1 2 3 4
Weights -0.18857 -0.057004 0.353448 0.827163
1 2 3 4
Weights -0.023456 -0.017528 0.14072 0.837238
1 2 3 4
Weights -0.034142 -0.211253 0.452698 0.738741
1 2 3 4
Weights -0.053769 0.035407 0.416388 0.481203
1 2 3 4
Weights -0.007887 0.121737 0.16108 0.618585
1 2 3 4
Weights 0.097325 0.123376 0.179632 0.533348
1 2 3 4
Weights -0.04582 -0.011302 0.277484 0.691321
1 2 3 4
Weights 0.094 -0.001849 0.296214 0.536416
1 2 3 4
Weights -0.06219 0.070758 0.199523 0.693788
1 2 3 4
Weights 0.065067 0.061018 0.080133 0.656692
1 2 3 4
Weights 0.06304 -0.031773 0.273995 0.611593
1 2 3 4
Weights -0.141156 -0.127444 0.351497 0.816942
1 2 3 4
Weights -0.141454 -0.14309 0.334545 0.844515
1 2 3 4
Weights 0.104398 0.063477 0.247888 0.510529
1 2 3 4
Weights -0.180903 -0.11672 0.39546 0.829254
Predicting t+1...
Fitting t+2...
1 2 3 4
Weights -0.22505 -0.006429 0.330185 0.686643
1 2 3 4
Weights -0.047066 0.117028 0.189653 0.477105
1 2 3 4
Weights -0.240074 -0.142534 0.122083 1.039986
1 2 3 4
Weights -0.17876 0.036319 0.241021 0.708641
1 2 3 4
Weights 0.036304 0.220158 0.189176 0.483344
1 2 3 4
Weights -0.207098 -0.064316 0.126516 0.923975
1 2 3 4
Weights -0.202996 0.108139 0.286226 0.676698
1 2 3 4
Weights -0.240155 -0.095474 0.207712 0.92937
1 2 3 4
Weights -0.241184 -0.056559 0.163489 0.961775
1 2 3 4
Weights -0.147507 -0.18481 -0.042884 1.172741
1 2 3 4
Weights -0.206308 -0.058149 0.244664 0.803509
1 2 3 4
Weights -0.049314 0.056676 0.183386 0.620319
1 2 3 4
Weights 0.121263 0.292694 0.014166 0.451156
1 2 3 4
Weights -0.090769 0.071432 0.214774 0.688833
1 2 3 4
Weights -0.111043 0.090695 0.166172 0.748009
1 2 3 4
Weights -0.202877 0.001256 0.123593 0.893192
1 2 3 4
Weights -0.15317 0.018026 0.205837 0.771556
1 2 3 4
Weights -0.074739 0.172142 0.27856 0.444067
1 2 3 4
Weights -0.189786 0.047616 0.289425 0.706653
1 2 3 4
Weights -0.139323 -0.120396 0.057766 1.00766
1 2 3 4
Weights -0.211156 -0.05185 0.218478 0.847329
1 2 3 4
Weights -0.195263 -0.075713 0.185129 0.863334
1 2 3 4
Weights -0.202105 -0.155886 0.102083 1.024631
1 2 3 4
Weights -0.216856 -0.081892 0.090346 1.05632
1 2 3 4
Weights -0.22157 0.110993 0.24681 0.74902
1 2 3 4
Weights -0.13773 0.167935 0.191948 0.621859
1 2 3 4
Weights -0.212224 0.046351 0.335485 0.646276
1 2 3 4
Weights -0.05963 0.103639 0.192834 0.63396
1 2 3 4
Weights -0.153324 0.10001 0.227068 0.609828
1 2 3 4
Weights -0.071035 0.039386 0.053517 0.823821
1 2 3 4
Weights -0.250275 -0.114714 0.22617 1.02457
1 2 3 4
Weights -0.172202 -0.112999 0.068026 1.01427
1 2 3 4
Weights -0.272146 0.135434 0.187253 0.763585
1 2 3 4
Weights -0.153162 0.010971 0.156922 0.828907
1 2 3 4
Weights -0.099117 0.39687 0.155126 0.351212
1 2 3 4
Weights -0.176751 0.125169 0.209827 0.728569
1 2 3 4
Weights -0.118086 0.162241 0.218859 0.555962
1 2 3 4
Weights -0.150207 0.019672 0.201791 0.6866
1 2 3 4
Weights -0.210101 0.013944 0.116589 0.946048
1 2 3 4
Weights -0.28088 -0.064186 0.173504 1.035552
1 2 3 4
Weights 0.050623 0.211274 0.211747 0.353845
1 2 3 4
Weights 0.168263 0.153851 0.128863 0.375592
1 2 3 4
Weights -0.200477 0.184652 0.328054 0.542688
1 2 3 4
Weights -0.15406 0.06969 0.206554 0.657341
1 2 3 4
Weights -0.131903 -0.006043 0.242985 0.741559
1 2 3 4
Weights -0.098052 -0.312692 0.024301 1.194824
1 2 3 4
Weights -0.0698 0.055042 0.182939 0.651578
1 2 3 4
Weights -0.190268 -0.073264 0.172231 0.954361
1 2 3 4
Weights -0.236497 -0.046378 0.259384 0.86696
1 2 3 4
Weights 0.021785 0.37182 0.201142 0.286845
1 2 3 4
Weights -0.25605 0.06154 0.091183 0.960188
1 2 3 4
Weights -0.091589 0.102029 0.234146 0.555534
1 2 3 4
Weights -0.136883 0.049462 0.252845 0.701645
1 2 3 4
Weights -0.196914 0.078446 0.097548 0.89069
1 2 3 4
Weights -0.095946 0.033449 0.375903 0.504678
1 2 3 4
Weights -0.038337 0.285631 0.267008 0.265419
1 2 3 4
Weights -0.250201 -0.099228 0.212762 0.978906
1 2 3 4
Weights 0.004356 0.169421 0.103706 0.530763
1 2 3 4
Weights -0.005587 -0.23204 -0.04787 1.058824
1 2 3 4
Weights -0.088339 0.09358 0.211326 0.615944
1 2 3 4
Weights -0.300052 0.067921 0.136585 0.922966
1 2 3 4
Weights 0.006059 0.285623 0.178865 0.349818
1 2 3 4
Weights 0.024603 0.263691 0.174147 0.429213
1 2 3 4
Weights -0.152565 0.029863 0.327717 0.60913
1 2 3 4
Weights 0.045606 0.230856 -0.015588 0.562598
1 2 3 4
Weights -0.177449 -0.154027 0.026329 1.112946
1 2 3 4
Weights -0.206108 -0.02561 0.279033 0.706528
1 2 3 4
Weights -0.290951 -0.130427 0.070741 1.184864
1 2 3 4
Weights -0.146346 0.268775 0.152807 0.582673
1 2 3 4
Weights -0.114456 -0.223492 -0.004107 1.097272
1 2 3 4
Weights -0.035382 0.070067 0.299904 0.480529
1 2 3 4
Weights -0.309212 -0.021237 0.364614 0.804452
1 2 3 4
Weights -0.208878 0.010879 0.077928 0.957478
1 2 3 4
Weights -0.184598 -0.000627 0.294145 0.635586
1 2 3 4
Weights 0.059492 0.182167 0.208191 0.413096
1 2 3 4
Weights 0.059837 0.30752 0.10354 0.171895
1 2 3 4
Weights -0.273993 0.068075 0.29819 0.799723
1 2 3 4
Weights -0.186257 -0.03165 0.283687 0.810487
1 2 3 4
Weights -0.231174 -0.108449 0.235724 0.909646
1 2 3 4
Weights -0.043946 0.114944 0.225382 0.464127
1 2 3 4
Weights -0.13411 0.022699 0.248147 0.664967
1 2 3 4
Weights -0.152529 -0.143885 0.113546 0.991553
1 2 3 4
Weights -0.262744 0.076597 0.053675 0.91783
1 2 3 4
Weights 0.093861 0.075414 0.035803 0.622019
1 2 3 4
Weights -0.284025 -0.0586 0.168906 0.978765
1 2 3 4
Weights -0.148872 -0.154595 0.119193 0.953268
1 2 3 4
Weights -0.272431 -0.063267 0.313162 0.859941
1 2 3 4
Weights -0.129219 0.054554 0.224088 0.659408
1 2 3 4
Weights -0.235494 -0.101302 0.216645 0.928187
1 2 3 4
Weights -0.08182 0.042914 0.229483 0.638859
1 2 3 4
Weights -0.157427 -0.223213 0.036424 1.155821
1 2 3 4
Weights -0.150293 -0.202788 0.093629 1.069525
1 2 3 4
Weights -0.196058 -0.007065 0.242592 0.745983
1 2 3 4
Weights 0.00421 -0.065301 0.127464 0.827141
1 2 3 4
Weights -0.177993 -0.074562 0.19551 0.859273
1 2 3 4
Weights -0.156706 0.000079 0.168073 0.838819
1 2 3 4
Weights -0.204023 -0.182902 0.12872 1.044556
1 2 3 4
Weights -0.27512 -0.249535 0.248456 1.089536
1 2 3 4
Weights -0.220216 0.002245 0.252254 0.825006
1 2 3 4
Weights -0.225003 -0.109746 0.171592 0.995683
1 2 3 4
Weights -0.158967 -0.010455 0.227954 0.795669
1 2 3 4
Weights -0.004276 0.134885 0.189284 0.571069
1 2 3 4
Weights -0.211677 0.011756 0.226418 0.791053
1 2 3 4
Weights -0.186585 0.062545 0.182475 0.83109
1 2 3 4
Weights -0.142256 0.130237 0.178685 0.651459
1 2 3 4
Weights -0.193876 0.064227 0.193526 0.768272
1 2 3 4
Weights -0.002133 0.075131 0.203682 0.515561
1 2 3 4
Weights -0.279505 -0.14045 0.211651 1.010373
1 2 3 4
Weights -0.130076 0.100208 0.284493 0.573723
1 2 3 4
Weights -0.273404 -0.06099 0.33425 0.785448
1 2 3 4
Weights -0.165325 0.1067 0.258015 0.599928
1 2 3 4
Weights -0.285355 0.001443 0.269086 0.91779
1 2 3 4
Weights -0.018154 0.11722 0.212672 0.511742
1 2 3 4
Weights -0.134185 0.046253 0.151109 0.683863
1 2 3 4
Weights -0.263683 0.063194 0.27375 0.781859
1 2 3 4
Weights -0.105689 0.061016 0.24054 0.681483
1 2 3 4
Weights -0.1509 0.002478 0.20988 0.753169
1 2 3 4
Weights -0.043531 0.107327 0.207772 0.471467
1 2 3 4
Weights -0.172636 -0.053624 0.165236 0.80695
1 2 3 4
Weights -0.298563 0.242072 0.120312 0.762916
1 2 3 4
Weights -0.178406 -0.015177 0.260099 0.712727
1 2 3 4
Weights -0.181639 0.074235 0.147093 0.758783
1 2 3 4
Weights -0.251912 -0.058323 0.217702 0.937217
1 2 3 4
Weights -0.122062 0.371449 0.271125 0.395048
1 2 3 4
Weights -0.152934 -0.005749 0.214906 0.795246
1 2 3 4
Weights -0.010547 0.134357 0.207641 0.552711
1 2 3 4
Weights -0.105894 0.070239 0.24318 0.674559
1 2 3 4
Weights -0.272754 0.006185 0.291149 0.864239
1 2 3 4
Weights -0.237817 -0.023784 0.176509 0.903944
1 2 3 4
Weights -0.044634 0.369777 0.220674 0.316778
1 2 3 4
Weights -0.207535 -0.062019 0.199043 0.867061
1 2 3 4
Weights -0.348069 0.00904 0.2071 0.993658
1 2 3 4
Weights -0.136986 0.070931 0.19111 0.719484
1 2 3 4
Weights -0.03069 0.068412 0.075907 0.693583
1 2 3 4
Weights -0.083559 0.023518 0.202842 0.782462
1 2 3 4
Weights -0.221584 -0.186303 0.167649 1.022402
1 2 3 4
Weights -0.071047 0.074548 0.187125 0.705139
1 2 3 4
Weights 0.048344 0.136199 0.184617 0.471696
1 2 3 4
Weights -0.022267 0.099145 0.117072 0.659909
1 2 3 4
Weights -0.253317 -0.157727 0.16614 1.037676
1 2 3 4
Weights -0.232082 0.107449 0.243596 0.730311
1 2 3 4
Weights -0.253716 -0.034782 0.242216 0.819009
1 2 3 4
Weights -0.197192 -0.090569 0.236504 0.864434
1 2 3 4
Weights -0.090311 0.182503 0.17442 0.540807
1 2 3 4
Weights -0.120328 0.063347 0.274974 0.633408
1 2 3 4
Weights -0.148827 0.014095 0.191658 0.826833
1 2 3 4
Weights -0.273714 0.031848 0.164572 0.906024
1 2 3 4
Weights -0.2441 0.011661 0.228563 0.865179
1 2 3 4
Weights -0.256976 -0.121663 0.312642 0.906391
1 2 3 4
Weights -0.191365 0.015957 0.128688 0.882231
1 2 3 4
Weights -0.170391 0.222877 0.159508 0.60654
1 2 3 4
Weights -0.222403 0.056125 0.314719 0.696166
1 2 3 4
Weights -0.208915 0.02973 0.31935 0.705553
1 2 3 4
Weights -0.030072 0.077457 0.201252 0.511753
1 2 3 4
Weights 0.003888 0.168673 0.196537 0.524467
1 2 3 4
Weights -0.212461 -0.230772 0.15101 1.080017
1 2 3 4
Weights -0.135084 -0.006559 0.149307 0.785562
1 2 3 4
Weights -0.125706 0.129105 0.250023 0.641922
1 2 3 4
Weights -0.166175 0.094081 0.126299 0.839971
1 2 3 4
Weights -0.171655 -0.079625 0.161063 0.90467
1 2 3 4
Weights -0.159567 0.041502 0.213626 0.725345
1 2 3 4
Weights -0.205668 -0.029613 0.301585 0.740125
1 2 3 4
Weights -0.085993 0.087599 0.229365 0.650215
1 2 3 4
Weights -0.131466 -0.041951 0.185297 0.802949
1 2 3 4
Weights -0.056133 0.129593 0.232359 0.598394
1 2 3 4
Weights -0.1014 0.113233 0.299301 0.523724
1 2 3 4
Weights -0.20375 -0.028859 0.257109 0.826994
1 2 3 4
Weights -0.190622 0.045064 0.222596 0.71826
1 2 3 4
Weights -0.26917 -0.014185 0.25943 0.851842
1 2 3 4
Weights -0.305145 -0.119402 0.284274 0.970242
1 2 3 4
Weights -0.269107 -0.11531 0.227531 0.970935
1 2 3 4
Weights -0.234163 -0.249322 0.281124 1.022303
1 2 3 4
Weights -0.216386 0.045089 0.201487 0.741348
1 2 3 4
Weights -0.197728 -0.220141 0.092656 1.178676
1 2 3 4
Weights -0.157919 0.135778 0.208116 0.635865
1 2 3 4
Weights -0.228624 -0.052526 0.253001 0.86669
1 2 3 4
Weights -0.197155 0.017484 0.207353 0.885853
1 2 3 4
Weights -0.272281 -0.112801 0.302604 0.892163
1 2 3 4
Weights 0.115759 0.241331 -0.27776 0.598985
1 2 3 4
Weights -0.189588 -0.073212 0.31456 0.737861
1 2 3 4
Weights -0.200079 0.205672 0.211076 0.671071
1 2 3 4
Weights -0.236147 -0.125426 0.248051 0.893117
1 2 3 4
Weights -0.082611 0.092083 0.245233 0.591318
1 2 3 4
Weights 0.154002 0.113806 0.003783 0.181857
1 2 3 4
Weights -0.187821 0.082366 0.175086 0.73095
1 2 3 4
Weights -0.286179 -0.166327 -0.01333 1.359089
1 2 3 4
Weights -0.165798 0.032645 0.236627 0.733659
1 2 3 4
Weights -0.1713 -0.036949 0.205832 0.846737
1 2 3 4
Weights -0.121487 0.18013 0.148542 0.61619
1 2 3 4
Weights -0.259453 -0.089162 0.282077 0.901413
1 2 3 4
Weights -0.087589 0.030817 0.184194 0.762907
1 2 3 4
Weights -0.148555 0.047948 0.20209 0.693798
1 2 3 4
Weights -0.102711 0.037665 0.221614 0.64551
1 2 3 4
Weights -0.043512 0.075969 0.201252 0.676622
1 2 3 4
Weights -0.17769 0.019565 0.199023 0.812075
1 2 3 4
Weights -0.225116 0.026377 0.273723 0.740074
1 2 3 4
Weights -0.231348 -0.072077 0.27207 0.809462
1 2 3 4
Weights 0.112258 0.124497 0.11542 0.324748
1 2 3 4
Weights -0.217548 -0.123213 0.199932 0.973343
1 2 3 4
Weights -0.173728 -0.048818 0.27186 0.803818
1 2 3 4
Weights -0.370758 0.085458 0.240831 0.941691
1 2 3 4
Weights -0.184337 -0.119818 0.079795 1.0284
1 2 3 4
Weights -0.228355 0.040217 0.268915 0.787127
1 2 3 4
Weights -0.335012 0.103107 0.252197 0.869277
1 2 3 4
Weights -0.211243 -0.111746 0.263106 0.840891
1 2 3 4
Weights 0.033454 0.082751 0.190525 0.413051
1 2 3 4
Weights -0.169458 0.20747 0.17349 0.659575
1 2 3 4
Weights -0.243795 -0.02776 0.349988 0.721177
1 2 3 4
Weights -0.242551 -0.031994 0.228227 0.868773
1 2 3 4
Weights 0.016728 0.459261 0.148305 0.260584
1 2 3 4
Weights -0.331495 -0.013493 0.229122 1.014551
1 2 3 4
Weights -0.272753 0.066954 0.251185 0.812999
1 2 3 4
Weights -0.251405 -0.110817 0.20292 1.031993
1 2 3 4
Weights -0.065586 0.093933 0.134703 0.674671
1 2 3 4
Weights -0.168492 0.074187 0.166528 0.763847
1 2 3 4
Weights 0.014141 0.196104 0.203281 0.348024
1 2 3 4
Weights -0.015828 0.118629 0.18116 0.569929
1 2 3 4
Weights -0.330203 0.004843 0.292664 0.90352
1 2 3 4
Weights -0.285404 -0.11418 0.237798 1.005435
1 2 3 4
Weights -0.265415 -0.050092 0.197974 0.944436
1 2 3 4
Weights -0.183824 0.036322 0.237272 0.750894
1 2 3 4
Weights -0.271548 0.01402 0.240584 0.829913
1 2 3 4
Weights -0.287999 0.001862 0.252291 0.908318
1 2 3 4
Weights 0.094628 0.076272 0.135564 0.40951
1 2 3 4
Weights -0.009733 0.222882 0.151949 0.526702
1 2 3 4
Weights -0.204705 0.042354 0.142803 0.802254
1 2 3 4
Weights -0.323196 0.075175 0.196865 0.940367
1 2 3 4
Weights -0.279604 0.017823 0.268397 0.824793
1 2 3 4
Weights -0.262985 -0.060253 0.350752 0.827103
1 2 3 4
Weights -0.159416 0.111808 0.246422 0.701151
1 2 3 4
Weights -0.215616 -0.129897 0.228338 0.906299
1 2 3 4
Weights -0.14473 0.113937 0.257562 0.662265
1 2 3 4
Weights -0.017032 -0.096257 0.023605 0.819952
1 2 3 4
Weights -0.141174 0.133715 0.164812 0.633426
1 2 3 4
Weights -0.283466 -0.078736 0.352426 0.865487
1 2 3 4
Weights -0.17303 0.153549 0.213496 0.665508
1 2 3 4
Weights -0.106273 0.103133 0.243901 0.625469
1 2 3 4
Weights -0.213645 -0.188951 0.175423 1.049157
1 2 3 4
Weights -0.211457 -0.001606 0.206122 0.825347
1 2 3 4
Weights -0.276496 0.065843 0.152277 0.904772
1 2 3 4
Weights -0.175171 0.003933 0.250978 0.72255
1 2 3 4
Weights -0.201052 0.026593 0.312322 0.70109
1 2 3 4
Weights 0.08182 0.221344 0.095202 0.488078
1 2 3 4
Weights -0.245788 0.051408 0.161353 0.810277
1 2 3 4
Weights -0.077794 0.027055 0.213579 0.70617
1 2 3 4
Weights -0.057567 0.141936 0.180375 0.570248
1 2 3 4
Weights -0.158395 -0.053757 0.136901 0.90804
1 2 3 4
Weights -0.250952 0.016024 0.213768 0.921463
1 2 3 4
Weights -0.180334 -0.034206 0.289903 0.773518
1 2 3 4
Weights -0.211897 -0.04295 0.205431 0.866298
1 2 3 4
Weights -0.239833 -0.105761 0.222161 0.922962
1 2 3 4
Weights -0.12213 0.096936 0.21384 0.654269
1 2 3 4
Weights 0.103207 0.139319 0.151004 0.45886
1 2 3 4
Weights -0.129693 0.03423 0.260007 0.679907
1 2 3 4
Weights -0.225479 -0.055631 0.306316 0.811164
1 2 3 4
Weights 0.12974 0.115753 0.18104 0.437031
1 2 3 4
Weights -0.198705 -0.161367 0.101352 1.070061
1 2 3 4
Weights -0.278775 0.263089 0.233825 0.639733
1 2 3 4
Weights -0.264353 0.166854 0.216016 0.770264
1 2 3 4
Weights -0.187076 -0.161405 0.118028 1.018459
1 2 3 4
Weights -0.00959 0.07519 0.207231 0.511704
1 2 3 4
Weights -0.254791 0.056072 0.263451 0.803631
1 2 3 4
Weights 0.015819 0.134294 0.137335 0.441473
1 2 3 4
Weights -0.210512 -0.11469 0.134377 1.03122
1 2 3 4
Weights -0.190073 -0.066933 0.16644 0.875249
1 2 3 4
Weights -0.194749 0.064876 0.244861 0.700411
1 2 3 4
Weights -0.269131 0.037745 0.281263 0.830189
1 2 3 4
Weights -0.234361 -0.083169 0.153164 0.97824
1 2 3 4
Weights -0.209304 0.023238 0.295703 0.765823
1 2 3 4
Weights -0.371335 0.099249 0.091215 1.041659
1 2 3 4
Weights -0.202599 -0.177671 0.166215 1.028867
1 2 3 4
Weights -0.218704 -0.148183 0.063878 1.113171
1 2 3 4
Weights -0.159816 0.09042 0.251267 0.640203
1 2 3 4
Weights -0.213782 0.018279 0.272018 0.733455
1 2 3 4
Weights -0.26735 -0.016622 0.264533 0.819688
1 2 3 4
Weights -0.244054 -0.101914 0.203593 0.961045
1 2 3 4
Weights -0.126633 -0.015077 0.206955 0.779786
1 2 3 4
Weights -0.169659 0.056754 0.292242 0.779166
1 2 3 4
Weights 0.010353 0.148592 0.171888 0.60221
1 2 3 4
Weights -0.040995 0.060185 0.231183 0.53009
1 2 3 4
Weights -0.099462 0.135132 0.103479 0.642309
1 2 3 4
Weights -0.182454 -0.062123 0.177655 0.8754
1 2 3 4
Weights -0.269916 -0.152374 0.285367 0.982983
1 2 3 4
Weights -0.317547 -0.103677 0.232928 1.078268
1 2 3 4
Weights -0.04656 0.097003 0.270284 0.38252
1 2 3 4
Weights -0.176192 0.105867 0.194501 0.702022
1 2 3 4
Weights -0.246869 -0.156513 0.261873 1.017917
1 2 3 4
Weights -0.067933 0.002108 0.107255 0.839973
1 2 3 4
Weights -0.336198 0.041076 0.259224 0.925741
1 2 3 4
Weights -0.185444 0.039881 0.298871 0.649554
1 2 3 4
Weights -0.097453 0.125281 0.234889 0.555243
1 2 3 4
Weights 0.122552 0.124567 0.205059 0.451549
1 2 3 4
Weights -0.168735 0.041773 0.216293 0.749565
1 2 3 4
Weights -0.072732 0.159449 0.196735 0.590512
1 2 3 4
Weights -0.239471 0.123897 0.244863 0.68643
1 2 3 4
Weights -0.068163 0.179877 0.12168 0.521558
1 2 3 4
Weights -0.059789 0.096608 0.170362 0.648665
1 2 3 4
Weights -0.264259 -0.118835 0.205455 0.982626
1 2 3 4
Weights -0.199121 -0.192048 0.162663 1.026581
1 2 3 4
Weights 0.048431 0.13911 0.191301 0.509362
1 2 3 4
Weights -0.277435 -0.166045 0.254316 1.047968
Predicting t+2...
Fitting t+3...
1 2 3 4
Weights -0.281703 -0.043295 0.259315 0.746516
1 2 3 4
Weights -0.096688 0.049468 0.231815 0.454553
1 2 3 4
Weights -0.217433 -0.304317 0.135251 1.044403
1 2 3 4
Weights -0.104693 -0.16361 0.344751 0.642976
1 2 3 4
Weights -0.009835 0.128553 0.313667 0.469545
1 2 3 4
Weights -0.157904 -0.21345 0.123338 0.917669
1 2 3 4
Weights -0.101383 -0.12659 0.344698 0.695648
1 2 3 4
Weights -0.324256 -0.118559 0.181941 0.950908
1 2 3 4
Weights -0.191681 -0.222592 0.152565 1.000988
1 2 3 4
Weights -0.134499 -0.298057 -0.053507 1.166838
1 2 3 4
Weights -0.125262 -0.271129 0.389542 0.681902
1 2 3 4
Weights -0.043883 -0.00519 0.216154 0.568093
1 2 3 4
Weights 0.368237 -0.017228 0.328898 0.188973
1 2 3 4
Weights -0.01329 -0.096525 0.339739 0.604374
1 2 3 4
Weights -0.108077 -0.05161 0.308234 0.694725
1 2 3 4
Weights -0.174251 -0.166836 0.198378 0.864559
1 2 3 4
Weights -0.178817 -0.053331 0.230359 0.767362
1 2 3 4
Weights -0.075298 0.036836 0.302596 0.492035
1 2 3 4
Weights -0.232456 -0.040835 0.321969 0.732653
1 2 3 4
Weights -0.085311 -0.298174 0.14949 0.93652
1 2 3 4
Weights -0.173675 -0.175992 0.197233 0.859729
1 2 3 4
Weights -0.197869 -0.167617 0.190102 0.839598
1 2 3 4
Weights -0.212036 -0.258779 0.106068 1.004655
1 2 3 4
Weights -0.231553 -0.114956 -0.039854 1.152763
1 2 3 4
Weights -0.205473 -0.134962 0.455923 0.71076
1 2 3 4
Weights -0.109359 -0.059764 0.383531 0.560359
1 2 3 4
Weights -0.280338 -0.018047 0.306936 0.717039
1 2 3 4
Weights -0.037767 -0.031174 0.34197 0.541509
1 2 3 4
Weights -0.158979 -0.049686 0.319652 0.576007
1 2 3 4
Weights 0.070678 -0.237307 0.278272 0.666696
1 2 3 4
Weights -0.228889 -0.311652 0.319266 1.041815
1 2 3 4
Weights -0.093382 -0.303335 0.102301 0.987441
1 2 3 4
Weights -0.253403 -0.117588 0.343137 0.750105
1 2 3 4
Weights -0.10776 -0.16422 0.271831 0.767663
1 2 3 4
Weights -0.137023 0.055621 0.439585 0.375342
1 2 3 4
Weights -0.171457 -0.092108 0.413798 0.681217
1 2 3 4
Weights -0.136786 0.02531 0.283474 0.573716
1 2 3 4
Weights -0.191151 -0.04809 0.224163 0.658587
1 2 3 4
Weights -0.162414 -0.223711 0.274517 0.90615
1 2 3 4
Weights -0.253352 -0.327553 0.338034 1.028562
1 2 3 4
Weights 0.048924 0.099488 0.295098 0.336748
1 2 3 4
Weights 0.310966 0.049443 0.157314 0.30019
1 2 3 4
Weights -0.213906 -0.00517 0.366626 0.643537
1 2 3 4
Weights -0.183597 -0.026182 0.245405 0.643895
1 2 3 4
Weights -0.174408 -0.038447 0.250476 0.734106
1 2 3 4
Weights 0.011092 -0.549195 0.065077 1.173082
1 2 3 4
Weights -0.068507 -0.006104 0.200442 0.619721
1 2 3 4
Weights -0.13108 -0.214316 0.149864 0.990094
1 2 3 4
Weights -0.22287 -0.216736 0.357913 0.841515
1 2 3 4
Weights 0.013643 0.092703 0.41738 0.32787
1 2 3 4
Weights -0.26695 -0.133621 0.236948 0.941665
1 2 3 4
Weights -0.062712 -0.026001 0.29342 0.51658
1 2 3 4
Weights -0.167817 -0.022717 0.289666 0.704561
1 2 3 4
Weights -0.214824 -0.155793 0.379734 0.787866
1 2 3 4
Weights -0.176573 0.063654 0.271492 0.577206
1 2 3 4
Weights -0.097456 0.08487 0.314592 0.416329
1 2 3 4
Weights -0.304696 -0.150927 0.172793 1.036367
1 2 3 4
Weights -0.001318 0.040563 0.274157 0.425016
1 2 3 4
Weights -0.028447 -0.233731 -0.023715 0.932748
1 2 3 4
Weights -0.100582 -0.006622 0.289406 0.578239
1 2 3 4
Weights -0.313957 -0.13987 0.266488 0.920652
1 2 3 4
Weights -0.09243 0.123913 0.354029 0.371546
1 2 3 4
Weights 0.06345 0.061439 0.371785 0.362313
1 2 3 4
Weights -0.203553 -0.00788 0.298565 0.639668
1 2 3 4
Weights 0.079753 0.068662 0.196069 0.425413
1 2 3 4
Weights -0.133391 -0.302053 0.011254 1.126625
1 2 3 4
Weights -0.205894 -0.101534 0.226806 0.720846
1 2 3 4
Weights -0.279568 -0.31604 0.061499 1.270621
1 2 3 4
Weights -0.167965 0.029874 0.371517 0.562032
1 2 3 4
Weights -0.045469 -0.335338 -0.081151 1.089856
1 2 3 4
Weights -0.089353 0.076492 0.253649 0.500711
1 2 3 4
Weights -0.29542 -0.19914 0.40808 0.839719
1 2 3 4
Weights -0.159912 -0.160767 0.110358 0.966468
1 2 3 4
Weights -0.205386 -0.081341 0.304142 0.605468
1 2 3 4
Weights 0.079783 0.093989 0.278726 0.372401
1 2 3 4
Weights 0.021385 0.083412 0.326882 0.157808
1 2 3 4
Weights -0.183504 -0.26643 0.537291 0.748319
1 2 3 4
Weights -0.231598 -0.092683 0.322541 0.812208
1 2 3 4
Weights -0.22689 -0.197862 0.18953 0.939929
1 2 3 4
Weights -0.005497 0.014376 0.22433 0.451979
1 2 3 4
Weights -0.156646 -0.038043 0.256174 0.649648
1 2 3 4
Weights -0.06118 -0.361496 0.211213 0.920642
1 2 3 4
Weights -0.203935 -0.14983 0.128136 0.90595
1 2 3 4
Weights 0.116603 0.047543 0.106255 0.500846
1 2 3 4
Weights -0.323005 -0.181411 0.200681 0.999133
1 2 3 4
Weights -0.144851 -0.248087 0.155719 0.88281
1 2 3 4
Weights -0.326277 -0.140799 0.309987 0.905042
1 2 3 4
Weights -0.099885 -0.044915 0.212046 0.662883
1 2 3 4
Weights -0.221462 -0.238299 0.237234 0.928406
1 2 3 4
Weights -0.062083 -0.042608 0.288406 0.572306
1 2 3 4
Weights -0.119216 -0.373391 0.019548 1.178358
1 2 3 4
Weights -0.029818 -0.438339 0.134596 1.044387
1 2 3 4
Weights -0.193164 -0.111486 0.248685 0.739192
1 2 3 4
Weights -0.028413 -0.025996 0.134081 0.762898
1 2 3 4
Weights -0.119439 -0.232116 0.245465 0.811063
1 2 3 4
Weights -0.147419 -0.140114 0.275506 0.785518
1 2 3 4
Weights -0.155287 -0.300225 0.03691 1.092854
1 2 3 4
Weights -0.24093 -0.43634 0.241114 1.14197
1 2 3 4
Weights -0.201815 -0.1737 0.359124 0.80281
1 2 3 4
Weights -0.168152 -0.395418 0.397469 0.903534
1 2 3 4
Weights -0.125158 -0.168495 0.352558 0.722596
1 2 3 4
Weights 0.01413 0.023312 0.341301 0.468929
1 2 3 4
Weights -0.219523 -0.097613 0.239389 0.805731
1 2 3 4
Weights -0.211434 -0.081924 0.308023 0.818803
1 2 3 4
Weights -0.161018 -0.003356 0.252255 0.644783
1 2 3 4
Weights -0.148976 -0.09207 0.211802 0.785971
1 2 3 4
Weights -0.02809 0.056044 0.212248 0.47767
1 2 3 4
Weights -0.269314 -0.293947 0.206871 1.048626
1 2 3 4
Weights -0.122324 -0.022672 0.324616 0.575272
1 2 3 4
Weights -0.27863 -0.179028 0.335332 0.796726
1 2 3 4
Weights -0.212043 0.00675 0.277429 0.637385
1 2 3 4
Weights -0.274618 -0.282279 0.530743 0.87279
1 2 3 4
Weights 0.043836 0.006478 0.281772 0.431485
1 2 3 4
Weights -0.060909 -0.10552 0.176291 0.639494
1 2 3 4
Weights -0.337823 -0.038039 0.313938 0.840087
1 2 3 4
Weights -0.136503 -0.004659 0.290102 0.671319
1 2 3 4
Weights -0.150099 -0.07668 0.213051 0.741891
1 2 3 4
Weights -0.027278 0.012266 0.244852 0.424807
1 2 3 4
Weights -0.135549 -0.177432 0.185559 0.753514
1 2 3 4
Weights -0.232711 -0.20084 0.514293 0.657377
1 2 3 4
Weights -0.231069 -0.040863 0.210375 0.735445
1 2 3 4
Weights -0.115866 -0.170066 0.316959 0.673704
1 2 3 4
Weights -0.243473 -0.249627 0.333308 0.918907
1 2 3 4
Weights -0.161564 0.083767 0.425704 0.535684
1 2 3 4
Weights -0.118998 -0.11617 0.229006 0.788067
1 2 3 4
Weights -0.030597 0.06617 0.272216 0.53072
1 2 3 4
Weights -0.081282 -0.058686 0.35107 0.617138
1 2 3 4
Weights -0.286904 -0.241807 0.553853 0.79866
1 2 3 4
Weights -0.154599 -0.28195 0.293886 0.86991
1 2 3 4
Weights 0.003083 0.045537 0.39624 0.384291
1 2 3 4
Weights -0.275233 -0.098709 0.198201 0.862932
1 2 3 4
Weights -0.341619 -0.291159 0.423829 0.989951
1 2 3 4
Weights -0.196745 0.034962 0.189197 0.744264
1 2 3 4
Weights 0.068134 -0.13161 0.279919 0.512476
1 2 3 4
Weights -0.090691 -0.016601 0.204217 0.793445
1 2 3 4
Weights -0.209951 -0.304918 0.145932 1.031174
1 2 3 4
Weights -0.079199 -0.017936 0.288849 0.656143
1 2 3 4
Weights 0.037106 0.091892 0.23264 0.428405
1 2 3 4
Weights -0.005618 0.00383 0.2122 0.585213
1 2 3 4
Weights -0.225987 -0.29178 0.103807 1.094639
1 2 3 4
Weights -0.248418 -0.062093 0.331516 0.753975
1 2 3 4
Weights -0.229142 -0.182482 0.244101 0.828076
1 2 3 4
Weights -0.228303 -0.13826 0.205592 0.878263
1 2 3 4
Weights -0.090992 -0.004938 0.34418 0.478889
1 2 3 4
Weights -0.165056 0.00849 0.292747 0.646803
1 2 3 4
Weights -0.137099 -0.11051 0.270594 0.803014
1 2 3 4
Weights -0.325504 -0.079351 0.205939 0.934654
1 2 3 4
Weights -0.211796 -0.244441 0.437525 0.80486
1 2 3 4
Weights -0.301658 -0.191038 0.312017 0.93157
1 2 3 4
Weights -0.167688 -0.149741 0.21794 0.851847
1 2 3 4
Weights -0.1154 -0.128984 0.499094 0.479838
1 2 3 4
Weights -0.2642 -0.066966 0.376818 0.719503
1 2 3 4
Weights -0.27065 -0.049109 0.352181 0.733624
1 2 3 4
Weights -0.004377 0.003254 0.249075 0.425544
1 2 3 4
Weights -0.02364 0.07848 0.309953 0.487736
1 2 3 4
Weights -0.194077 -0.305354 0.016034 1.155407
1 2 3 4
Weights -0.059753 -0.135658 0.140065 0.760316
1 2 3 4
Weights -0.169878 0.048611 0.275332 0.694472
1 2 3 4
Weights -0.173344 -0.132161 0.40097 0.741481
1 2 3 4
Weights -0.126184 -0.228064 0.208994 0.866069
1 2 3 4
Weights -0.133822 -0.052208 0.170511 0.759591
1 2 3 4
Weights -0.222309 -0.080231 0.230704 0.78579
1 2 3 4
Weights -0.097033 -0.026268 0.369355 0.579445
1 2 3 4
Weights -0.150413 -0.094737 0.194206 0.777051
1 2 3 4
Weights -0.102693 0.059253 0.31027 0.595515
1 2 3 4
Weights -0.137411 0.02297 0.341356 0.535184
1 2 3 4
Weights -0.205002 -0.123163 0.250942 0.853936
1 2 3 4
Weights -0.148333 -0.106371 0.24137 0.717768
1 2 3 4
Weights -0.326896 -0.103356 0.269353 0.895326
1 2 3 4
Weights -0.279647 -0.272795 0.237071 1.05393
1 2 3 4
Weights -0.258799 -0.232089 0.170271 1.035387
1 2 3 4
Weights -0.241999 -0.318524 0.217005 1.061358
1 2 3 4
Weights -0.217823 -0.109025 0.273357 0.714888
1 2 3 4
Weights -0.208276 -0.278394 -0.028615 1.283368
1 2 3 4
Weights -0.134812 -0.073517 0.320229 0.637962
1 2 3 4
Weights -0.217047 -0.181552 0.274693 0.879008
1 2 3 4
Weights -0.118878 -0.198321 0.29362 0.894789
1 2 3 4
Weights -0.268079 -0.247696 0.324978 0.897347
1 2 3 4
Weights -0.048538 0.412117 -0.130368 0.364174
1 2 3 4
Weights -0.213319 -0.116545 0.29099 0.724423
1 2 3 4
Weights -0.099199 -0.229927 0.669572 0.489016
1 2 3 4
Weights -0.200476 -0.237209 0.188946 0.917295
1 2 3 4
Weights -0.062493 -0.009857 0.285201 0.571398
1 2 3 4
Weights 0.071871 0.141739 0.113208 0.082751
1 2 3 4
Weights -0.14076 -0.15995 0.365201 0.640997
1 2 3 4
Weights -0.318458 -0.390172 0.082533 1.448125
1 2 3 4
Weights -0.162593 -0.069603 0.254166 0.739102
1 2 3 4
Weights -0.147993 -0.199671 0.340865 0.769958
1 2 3 4
Weights -0.157023 0.028802 0.29008 0.583814
1 2 3 4
Weights -0.305314 -0.1468 0.239264 0.957811
1 2 3 4
Weights -0.13945 0.011617 0.211942 0.753066
1 2 3 4
Weights -0.181471 -0.0098 0.187391 0.705329
1 2 3 4
Weights -0.18089 0.026459 0.204916 0.652443
1 2 3 4
Weights -0.014783 0.004051 0.213052 0.669745
1 2 3 4
Weights -0.154479 -0.166457 0.346126 0.754081
1 2 3 4
Weights -0.210598 -0.110541 0.284343 0.76378
1 2 3 4
Weights -0.198193 -0.184855 0.230023 0.823719
1 2 3 4
Weights 0.048795 0.135568 0.150405 0.282737
1 2 3 4
Weights -0.239611 -0.19264 0.167057 1.005869
1 2 3 4
Weights -0.142865 -0.143311 0.261701 0.80685
1 2 3 4
Weights -0.37439 -0.25845 0.527555 0.943228
1 2 3 4
Weights -0.158437 -0.238455 0.064789 1.031718
1 2 3 4
Weights -0.258782 -0.134039 0.435913 0.752561
1 2 3 4
Weights -0.38489 -0.205965 0.594451 0.819325
1 2 3 4
Weights -0.214951 -0.172137 0.208229 0.849348
1 2 3 4
Weights -0.044 0.101125 0.190819 0.377406
1 2 3 4
Weights -0.114048 -0.094609 0.42889 0.592221
1 2 3 4
Weights -0.224284 -0.152423 0.360944 0.716531
1 2 3 4
Weights -0.260995 -0.162916 0.285989 0.86566
1 2 3 4
Weights -0.0039 0.093779 0.485376 0.27984
1 2 3 4
Weights -0.349926 -0.393056 0.656666 0.9205
1 2 3 4
Weights -0.255782 -0.18708 0.437488 0.78863
1 2 3 4
Weights -0.265673 -0.254507 0.257016 1.062315
1 2 3 4
Weights -0.04957 -0.026668 0.242759 0.602255
1 2 3 4
Weights -0.134039 -0.170026 0.407671 0.649509
1 2 3 4
Weights -0.063778 0.110532 0.260248 0.380138
1 2 3 4
Weights 0.03991 0.00836 0.243662 0.51383
1 2 3 4
Weights -0.309076 -0.257957 0.442585 0.92353
1 2 3 4
Weights -0.322024 -0.218979 0.230589 1.064608
1 2 3 4
Weights -0.203407 -0.336943 0.379278 0.893738
1 2 3 4
Weights -0.222798 -0.046895 0.263281 0.768771
1 2 3 4
Weights -0.267374 -0.141099 0.263184 0.863572
1 2 3 4
Weights -0.238652 -0.307899 0.486238 0.865076
1 2 3 4
Weights 0.078302 0.100022 0.10917 0.360966
1 2 3 4
Weights 0.016069 0.02508 0.38194 0.427904
1 2 3 4
Weights -0.214547 -0.076957 0.181148 0.78763
1 2 3 4
Weights -0.288679 -0.301699 0.529089 0.886782
1 2 3 4
Weights -0.31351 -0.097542 0.270319 0.884403
1 2 3 4
Weights -0.32403 -0.109313 0.317838 0.89114
1 2 3 4
Weights -0.15352 -0.044841 0.345869 0.705389
1 2 3 4
Weights -0.27317 -0.179093 0.241284 0.882731
1 2 3 4
Weights -0.16285 -0.015134 0.344201 0.671547
1 2 3 4
Weights -0.057633 -0.096272 0.103527 0.647871
1 2 3 4
Weights -0.140226 -0.031402 0.281928 0.588015
1 2 3 4
Weights -0.306416 -0.190406 0.370705 0.902273
1 2 3 4
Weights -0.120798 -0.149516 0.520216 0.540108
1 2 3 4
Weights -0.133967 0.032237 0.251764 0.658642
1 2 3 4
Weights -0.215339 -0.299134 0.168129 1.067429
1 2 3 4
Weights -0.23873 -0.081475 0.201215 0.846168
1 2 3 4
Weights -0.245228 -0.207288 0.344213 0.872503
1 2 3 4
Weights -0.159967 -0.109146 0.279701 0.698908
1 2 3 4
Weights -0.23025 -0.077257 0.355218 0.712171
1 2 3 4
Weights 0.093208 0.084535 0.298731 0.375363
1 2 3 4
Weights -0.163763 -0.209799 0.274529 0.7714
1 2 3 4
Weights -0.031866 -0.077499 0.287768 0.632943
1 2 3 4
Weights -0.089416 0.049336 0.261776 0.546149
1 2 3 4
Weights -0.160775 -0.16229 0.201979 0.865885
1 2 3 4
Weights -0.202402 -0.25482 0.405131 0.898217
1 2 3 4
Weights -0.199823 -0.099712 0.299162 0.773004
1 2 3 4
Weights -0.084154 -0.314692 0.315623 0.812227
1 2 3 4
Weights -0.25563 -0.17991 0.169508 0.959461
1 2 3 4
Weights -0.107282 -0.037427 0.277972 0.643051
1 2 3 4
Weights 0.078239 0.127494 0.187473 0.418745
1 2 3 4
Weights -0.163804 -0.018889 0.270747 0.682886
1 2 3 4
Weights -0.28067 -0.106217 0.301319 0.834576
1 2 3 4
Weights 0.103502 0.144438 0.189151 0.391972
1 2 3 4
Weights -0.115541 -0.376022 0.124996 1.077003
1 2 3 4
Weights -0.304156 -0.019248 0.422497 0.689259
1 2 3 4
Weights -0.23831 -0.183946 0.549193 0.701554
1 2 3 4
Weights -0.152221 -0.253341 0.030422 1.053189
1 2 3 4
Weights 0.013547 0.026286 0.218237 0.453571
1 2 3 4
Weights -0.21056 -0.198873 0.430389 0.780682
1 2 3 4
Weights -0.068617 0.117275 0.199497 0.396474
1 2 3 4
Weights -0.149622 -0.355601 0.261865 0.995474
1 2 3 4
Weights -0.184601 -0.149172 0.137959 0.873304
1 2 3 4
Weights -0.159754 -0.11376 0.324754 0.678903
1 2 3 4
Weights -0.225926 -0.231202 0.470648 0.802738
1 2 3 4
Weights -0.227292 -0.218577 0.16963 0.989771
1 2 3 4
Weights -0.266372 -0.034577 0.288744 0.824035
1 2 3 4
Weights -0.332369 -0.237922 0.296709 1.055561
1 2 3 4
Weights -0.151959 -0.341122 0.174359 1.033402
1 2 3 4
Weights -0.152446 -0.343757 0.050416 1.152615
1 2 3 4
Weights -0.130685 -0.028451 0.220593 0.688389
1 2 3 4
Weights -0.183933 -0.123046 0.28415 0.743866
1 2 3 4
Weights -0.187026 -0.256173 0.342409 0.803007
1 2 3 4
Weights -0.241783 -0.210324 0.170903 1.002951
1 2 3 4
Weights -0.15755 -0.027365 0.140257 0.819252
1 2 3 4
Weights -0.144383 -0.072797 0.329839 0.826042
1 2 3 4
Weights 0.027302 0.051464 0.290825 0.538306
1 2 3 4
Weights -0.059088 0.039853 0.200261 0.520957
1 2 3 4
Weights -0.08108 -0.051985 0.289281 0.531776
1 2 3 4
Weights -0.209101 -0.126284 0.181157 0.863336
1 2 3 4
Weights -0.307188 -0.209773 0.213177 1.063435
1 2 3 4
Weights -0.296512 -0.42738 0.478778 1.067873
1 2 3 4
Weights -0.018756 0.027336 0.198254 0.410373
1 2 3 4
Weights -0.173229 -0.089165 0.360572 0.644139
1 2 3 4
Weights -0.268144 -0.239344 0.226112 1.087611
1 2 3 4
Weights -0.054552 -0.054579 0.125922 0.809888
1 2 3 4
Weights -0.26044 -0.363948 0.565305 0.88762
1 2 3 4
Weights -0.261448 -0.053496 0.400006 0.613476
1 2 3 4
Weights -0.144913 0.058068 0.234529 0.596405
1 2 3 4
Weights 0.163735 0.112891 0.182009 0.424282
1 2 3 4
Weights -0.143633 -0.101039 0.278072 0.729961
1 2 3 4
Weights -0.135703 0.029579 0.395593 0.523942
1 2 3 4
Weights -0.324532 0.005277 0.31265 0.729149
1 2 3 4
Weights -0.115871 0.06467 0.230706 0.479243
1 2 3 4
Weights 0.017745 -0.073364 0.286235 0.568857
1 2 3 4
Weights -0.226258 -0.317082 0.254527 0.987772
1 2 3 4
Weights -0.20036 -0.297733 0.167598 1.015178
1 2 3 4
Weights 0.069872 0.071901 0.260539 0.449638
1 2 3 4
Weights -0.30156 -0.302241 0.28654 1.092835
Predicting t+3...
Fitting t+4...
1 2 3 4
Weights -0.247992 -0.121887 0.22732 0.71731
1 2 3 4
Weights -0.095435 -0.001227 0.159148 0.483604
1 2 3 4
Weights -0.161118 -0.336654 -0.03772 1.076913
1 2 3 4
Weights -0.183179 -0.040592 0.149377 0.704637
1 2 3 4
Weights 0.049108 0.04434 0.211767 0.574874
1 2 3 4
Weights -0.106897 -0.208395 -0.036347 0.917739
1 2 3 4
Weights -0.112036 -0.018117 0.119733 0.76503
1 2 3 4
Weights -0.267541 -0.255045 0.150843 0.953527
1 2 3 4
Weights -0.171268 -0.19817 -0.010092 1.029181
1 2 3 4
Weights -0.071663 -0.352046 -0.172223 1.165979
1 2 3 4
Weights -0.196011 -0.129796 0.152533 0.745559
1 2 3 4
Weights -0.027996 -0.011374 0.13756 0.568958
1 2 3 4
Weights 0.002171 0.372718 0.063748 0.406437
1 2 3 4
Weights -0.108556 0.035356 0.179202 0.677776
1 2 3 4
Weights -0.100922 -0.053033 0.151016 0.799229
1 2 3 4
Weights -0.229347 -0.087374 0.034421 0.908516
1 2 3 4
Weights -0.146949 -0.103864 0.151877 0.790614
1 2 3 4
Weights -0.031097 0.025172 0.168475 0.529176
1 2 3 4
Weights -0.300761 -0.040667 0.261169 0.781786
1 2 3 4
Weights -0.059549 -0.253263 -0.05179 0.97336
1 2 3 4
Weights -0.184773 -0.132606 0.07759 0.849686
1 2 3 4
Weights -0.18015 -0.181702 0.088166 0.828962
1 2 3 4
Weights -0.132136 -0.340254 -0.017047 1.011525
1 2 3 4
Weights -0.210893 -0.163509 -0.070587 1.125094
1 2 3 4
Weights -0.238138 -0.096571 0.20896 0.893523
1 2 3 4
Weights -0.185813 0.011932 0.160298 0.717544
1 2 3 4
Weights -0.32255 -0.058962 0.27459 0.731308
1 2 3 4
Weights -0.088459 0.016489 0.192893 0.640686
1 2 3 4
Weights -0.165158 -0.053892 0.16456 0.649256
1 2 3 4
Weights -0.098365 0.050016 0.005288 0.752981
1 2 3 4
Weights -0.239533 -0.28602 0.132086 1.14746
1 2 3 4
Weights -0.106131 -0.206972 -0.091477 0.994882
1 2 3 4
Weights -0.190178 -0.146073 0.074579 0.898986
1 2 3 4
Weights -0.111162 -0.109389 0.080057 0.836635
1 2 3 4
Weights -0.074484 -0.010506 0.094737 0.658846
1 2 3 4
Weights -0.132066 -0.11547 0.168912 0.858604
1 2 3 4
Weights -0.100563 -0.01449 0.143414 0.646263
1 2 3 4
Weights -0.193534 -0.092409 0.157387 0.662823
1 2 3 4
Weights -0.124914 -0.203543 0.020292 1.036407
1 2 3 4
Weights -0.241647 -0.310289 0.070244 1.190346
1 2 3 4
Weights 0.031331 0.101059 0.183035 0.417491
1 2 3 4
Weights 0.426492 0.105883 0.043369 0.259499
1 2 3 4
Weights -0.216362 -0.001823 0.201522 0.73516
1 2 3 4
Weights -0.199076 -0.045475 0.149581 0.67435
1 2 3 4
Weights -0.178848 -0.077799 0.217 0.736824
1 2 3 4
Weights 0.029964 -0.452042 -0.183094 1.200739
1 2 3 4
Weights -0.047773 -0.019694 0.128026 0.616199
1 2 3 4
Weights -0.118835 -0.171832 0.01355 1.001684
1 2 3 4
Weights -0.247655 -0.181882 0.187319 0.918605
1 2 3 4
Weights 0.078074 0.066482 0.134181 0.545524
1 2 3 4
Weights -0.262445 -0.146816 0.038605 1.071371
1 2 3 4
Weights -0.07772 0.007749 0.158509 0.556958
1 2 3 4
Weights -0.215388 -0.022945 0.231351 0.743681
1 2 3 4
Weights -0.233314 -0.148147 0.136143 0.977231
1 2 3 4
Weights -0.204708 0.006667 0.337244 0.502362
1 2 3 4
Weights -0.016162 0.03378 0.119345 0.508241
1 2 3 4
Weights -0.320837 -0.201036 0.135848 1.04592
1 2 3 4
Weights -0.043818 0.053859 0.132393 0.534606
1 2 3 4
Weights -0.007678 -0.246044 -0.053259 0.844397
1 2 3 4
Weights -0.116163 -0.010966 0.18218 0.63559
1 2 3 4
Weights -0.240415 -0.218624 0.046448 1.057662
1 2 3 4
Weights -0.07682 0.022277 0.19312 0.554382
1 2 3 4
Weights 0.027033 0.108721 0.160699 0.533216
1 2 3 4
Weights -0.231135 -0.043337 0.280332 0.627515
1 2 3 4
Weights 0.183112 -0.00179 0.051221 0.51059
1 2 3 4
Weights -0.060599 -0.33624 -0.145451 1.144515
1 2 3 4
Weights -0.234519 -0.083834 0.162377 0.677388
1 2 3 4
Weights -0.164909 -0.452452 -0.127423 1.385531
1 2 3 4
Weights -0.009453 -0.100597 0.10801 0.749995
1 2 3 4
Weights -0.006682 -0.304139 -0.199377 1.015984
1 2 3 4
Weights -0.129831 0.0416 0.279586 0.468766
1 2 3 4
Weights -0.324895 -0.170793 0.256251 0.902362
1 2 3 4
Weights -0.108455 -0.162167 -0.064319 1.012993
1 2 3 4
Weights -0.200996 -0.104426 0.210919 0.600756
1 2 3 4
Weights 0.045246 0.122273 0.187482 0.431613
1 2 3 4
Weights -0.025876 0.058163 0.108239 0.383052
1 2 3 4
Weights -0.283739 -0.110696 0.22614 0.944765
1 2 3 4
Weights -0.294149 -0.096136 0.283367 0.847538
1 2 3 4
Weights -0.221687 -0.203358 0.107014 0.919548
1 2 3 4
Weights -0.030477 0.063541 0.126795 0.448023
1 2 3 4
Weights -0.125809 -0.079556 0.182148 0.648971
1 2 3 4
Weights -0.075561 -0.240803 -0.02482 0.956231
1 2 3 4
Weights -0.139811 -0.151366 -0.101336 0.975069
1 2 3 4
Weights 0.021771 0.134158 0.076714 0.485673
1 2 3 4
Weights -0.298805 -0.247022 0.077271 1.054668
1 2 3 4
Weights -0.120576 -0.249188 0.036066 0.866694
1 2 3 4
Weights -0.315984 -0.209097 0.244357 0.936903
1 2 3 4
Weights -0.152534 0.022335 0.122835 0.653569
1 2 3 4
Weights -0.202989 -0.239494 0.094941 0.952966
1 2 3 4
Weights -0.105782 0.000079 0.19467 0.596264
1 2 3 4
Weights -0.036759 -0.427322 -0.146509 1.214584
1 2 3 4
Weights 0.014962 -0.352101 -0.12091 1.075986
1 2 3 4
Weights -0.200421 -0.104669 0.14596 0.739974
1 2 3 4
Weights -0.128419 0.020959 0.181534 0.715823
1 2 3 4
Weights -0.087922 -0.189519 0.061762 0.831352
1 2 3 4
Weights -0.154843 -0.118036 0.122539 0.851242
1 2 3 4
Weights -0.113953 -0.302669 -0.080143 1.05834
1 2 3 4
Weights -0.16344 -0.489964 0.045159 1.208578
1 2 3 4
Weights -0.216241 -0.143084 0.180091 0.893481
1 2 3 4
Weights -0.173435 -0.306848 0.074033 1.055762
1 2 3 4
Weights -0.148569 -0.113625 0.175079 0.80011
1 2 3 4
Weights -0.068941 0.074717 0.221754 0.576351
1 2 3 4
Weights -0.197361 -0.123269 0.128451 0.831299
1 2 3 4
Weights -0.255463 -0.083503 0.186218 0.930332
1 2 3 4
Weights -0.1281 -0.04567 0.113308 0.712759
1 2 3 4
Weights -0.096946 -0.087933 0.049022 0.819969
1 2 3 4
Weights -0.053583 0.039088 0.190471 0.469486
1 2 3 4
Weights -0.222354 -0.335943 0.050824 1.090689
1 2 3 4
Weights -0.105843 -0.024994 0.201515 0.613203
1 2 3 4
Weights -0.293858 -0.176504 0.227418 0.803553
1 2 3 4
Weights -0.172865 -0.061476 0.176754 0.677534
1 2 3 4
Weights -0.298538 -0.245458 0.238253 1.097102
1 2 3 4
Weights -0.027121 0.091981 0.16781 0.472177
1 2 3 4
Weights -0.094025 -0.014461 0.024776 0.6395
1 2 3 4
Weights -0.361098 -0.097153 0.231042 0.92286
1 2 3 4
Weights -0.186069 -0.005214 0.235089 0.716714
1 2 3 4
Weights -0.190501 -0.046407 0.140519 0.735284
1 2 3 4
Weights -0.026144 0.023753 0.136715 0.440593
1 2 3 4
Weights -0.103914 -0.157541 0.04021 0.738518
1 2 3 4
Weights -0.15558 -0.181514 0.021546 0.981343
1 2 3 4
Weights -0.184202 -0.125726 0.179323 0.7017
1 2 3 4
Weights -0.148653 -0.073942 0.058992 0.780269
1 2 3 4
Weights -0.235325 -0.244277 0.131892 1.023303
1 2 3 4
Weights -0.084954 0.043807 0.143928 0.745258
1 2 3 4
Weights -0.14798 -0.061513 0.124747 0.795459
1 2 3 4
Weights -0.076779 0.068506 0.209898 0.588651
1 2 3 4
Weights -0.167832 0.012989 0.233435 0.693858
1 2 3 4
Weights -0.28794 -0.244385 0.275631 1.01942
1 2 3 4
Weights -0.141941 -0.202784 0.021584 0.962854
1 2 3 4
Weights -0.010047 0.119674 0.078732 0.598791
1 2 3 4
Weights -0.243147 -0.192616 0.153442 0.863807
1 2 3 4
Weights -0.34167 -0.284613 0.122886 1.203933
1 2 3 4
Weights -0.20275 -0.019208 0.156753 0.76164
1 2 3 4
Weights -0.02256 0.038719 0.048526 0.596637
1 2 3 4
Weights -0.118123 -0.002531 0.171884 0.802997
1 2 3 4
Weights -0.194544 -0.309366 0.023135 1.023909
1 2 3 4
Weights -0.097055 -0.015584 0.1887 0.725754
1 2 3 4
Weights 0.034966 0.076933 0.179865 0.452613
1 2 3 4
Weights -0.145916 0.117612 0.127229 0.632543
1 2 3 4
Weights -0.161978 -0.33894 -0.032128 1.102337
1 2 3 4
Weights -0.261992 -0.06942 0.170957 0.858411
1 2 3 4
Weights -0.207577 -0.175524 0.093458 0.839385
1 2 3 4
Weights -0.18878 -0.203165 0.151456 0.864869
1 2 3 4
Weights -0.085554 -0.014269 0.142948 0.613035
1 2 3 4
Weights -0.204011 -0.013546 0.252969 0.674718
1 2 3 4
Weights -0.169151 -0.072732 0.147818 0.861932
1 2 3 4
Weights -0.270531 -0.183031 0.090663 1.007652
1 2 3 4
Weights -0.225276 -0.192646 0.162867 0.969392
1 2 3 4
Weights -0.271254 -0.263128 0.24133 0.954874
1 2 3 4
Weights -0.146069 -0.140799 0.041725 0.918615
1 2 3 4
Weights -0.082643 -0.087051 0.087416 0.751769
1 2 3 4
Weights -0.290869 -0.093012 0.270288 0.795285
1 2 3 4
Weights -0.294398 -0.096891 0.290555 0.782876
1 2 3 4
Weights -0.011598 0.026883 0.148895 0.435935
1 2 3 4
Weights -0.028533 0.049228 0.212107 0.580982
1 2 3 4
Weights -0.135213 -0.364261 -0.055752 1.109973
1 2 3 4
Weights -0.091414 -0.034484 0.0121 0.729923
1 2 3 4
Weights -0.120957 -0.023416 0.19485 0.751139
1 2 3 4
Weights -0.181288 -0.124209 0.149316 0.941223
1 2 3 4
Weights -0.12002 -0.180981 0.047675 0.884725
1 2 3 4
Weights -0.124805 -0.033685 0.081113 0.742317
1 2 3 4
Weights -0.243336 -0.085548 0.199491 0.744786
1 2 3 4
Weights -0.152932 -0.008216 0.248877 0.683386
1 2 3 4
Weights -0.246632 -0.041708 0.147361 0.772881
1 2 3 4
Weights -0.145625 0.036636 0.250917 0.67602
1 2 3 4
Weights -0.125299 -0.017078 0.251588 0.580255
1 2 3 4
Weights -0.189621 -0.140006 0.161489 0.868921
1 2 3 4
Weights -0.130156 -0.076808 0.083804 0.738915
1 2 3 4
Weights -0.317333 -0.174052 0.189061 0.940865
1 2 3 4
Weights -0.282911 -0.262315 0.108488 1.077535
1 2 3 4
Weights -0.254956 -0.238741 0.068469 1.035738
1 2 3 4
Weights -0.204953 -0.368364 0.149099 1.03979
1 2 3 4
Weights -0.182223 -0.134579 0.105193 0.770063
1 2 3 4
Weights -0.212722 -0.310851 -0.072102 1.271675
1 2 3 4
Weights -0.166916 -0.042846 0.153009 0.732752
1 2 3 4
Weights -0.174441 -0.204507 0.136537 0.916516
1 2 3 4
Weights -0.136344 -0.108852 0.086909 0.986174
1 2 3 4
Weights -0.282262 -0.234318 0.197404 0.920881
1 2 3 4
Weights -0.149501 0.176664 0.21935 0.298229
1 2 3 4
Weights -0.175023 -0.161942 0.229673 0.694468
1 2 3 4
Weights -0.159876 -0.083425 0.160993 0.862681
1 2 3 4
Weights -0.200289 -0.207618 0.085043 0.877841
1 2 3 4
Weights -0.084196 0.019412 0.183442 0.603858
1 2 3 4
Weights 0.07255 0.046558 0.137805 0.152065
1 2 3 4
Weights -0.125854 -0.114684 0.088949 0.773454
1 2 3 4
Weights -0.212968 -0.575581 -0.143878 1.683922
1 2 3 4
Weights -0.171491 -0.06159 0.156825 0.76014
1 2 3 4
Weights -0.150963 -0.163464 0.151213 0.851261
1 2 3 4
Weights -0.140586 -0.020001 0.131348 0.702212
1 2 3 4
Weights -0.295858 -0.208537 0.194227 0.961756
1 2 3 4
Weights -0.151103 -0.031151 0.192833 0.773478
1 2 3 4
Weights -0.141838 -0.072815 0.126327 0.699641
1 2 3 4
Weights -0.170665 -0.058289 0.207343 0.631863
1 2 3 4
Weights -0.04593 0.053541 0.146062 0.678581
1 2 3 4
Weights -0.182751 -0.118594 0.153548 0.85658
1 2 3 4
Weights -0.199135 -0.106426 0.150562 0.793025
1 2 3 4
Weights -0.172292 -0.172476 0.11425 0.795105
1 2 3 4
Weights -0.031868 0.103863 0.18494 0.301964
1 2 3 4
Weights -0.232203 -0.230573 0.107036 1.002439
1 2 3 4
Weights -0.136606 -0.116546 0.164347 0.800525
1 2 3 4
Weights -0.374609 -0.272741 0.193148 1.23356
1 2 3 4
Weights -0.115756 -0.254457 -0.060377 1.029286
1 2 3 4
Weights -0.283676 -0.146198 0.258927 0.894465
1 2 3 4
Weights -0.393611 -0.240936 0.269314 1.12372
1 2 3 4
Weights -0.192289 -0.194672 0.144498 0.80416
1 2 3 4
Weights -0.045358 0.019824 0.200931 0.362741
1 2 3 4
Weights -0.105585 -0.045317 0.106328 0.804278
1 2 3 4
Weights -0.21805 -0.137539 0.233043 0.725402
1 2 3 4
Weights -0.225588 -0.208972 0.144139 0.926791
1 2 3 4
Weights 0.011563 0.070947 0.12023 0.622223
1 2 3 4
Weights -0.305728 -0.417779 0.213692 1.286218
1 2 3 4
Weights -0.227206 -0.188262 0.165194 0.962228
1 2 3 4
Weights -0.199856 -0.33991 0.111635 1.156032
1 2 3 4
Weights -0.073802 0.005284 0.112965 0.659982
1 2 3 4
Weights -0.161632 -0.10472 0.132341 0.812515
1 2 3 4
Weights -0.042046 0.028444 0.176305 0.448214
1 2 3 4
Weights -0.037172 0.101722 0.143083 0.549446
1 2 3 4
Weights -0.381656 -0.185782 0.213534 1.075722
1 2 3 4
Weights -0.289254 -0.297896 0.132508 1.11758
1 2 3 4
Weights -0.227357 -0.244117 0.082195 1.030004
1 2 3 4
Weights -0.243081 -0.073222 0.190558 0.807053
1 2 3 4
Weights -0.250406 -0.154039 0.111974 0.914666
1 2 3 4
Weights -0.25195 -0.241625 0.16527 1.065563
1 2 3 4
Weights 0.064975 0.084994 0.129549 0.308326
1 2 3 4
Weights -0.068838 0.08739 0.180094 0.612692
1 2 3 4
Weights -0.217063 -0.084445 0.060688 0.813975
1 2 3 4
Weights -0.238209 -0.296582 0.114725 1.187368
1 2 3 4
Weights -0.304015 -0.143447 0.165759 0.933828
1 2 3 4
Weights -0.328015 -0.172908 0.291022 0.901932
1 2 3 4
Weights -0.185776 -0.018155 0.199544 0.808252
1 2 3 4
Weights -0.22333 -0.271347 0.169157 0.887262
1 2 3 4
Weights -0.175992 -0.026119 0.221868 0.765834
1 2 3 4
Weights -0.042974 -0.122255 0.060984 0.59724
1 2 3 4
Weights -0.116296 -0.047449 0.103401 0.674206
1 2 3 4
Weights -0.37269 -0.172948 0.298541 0.936719
1 2 3 4
Weights -0.124289 -0.088807 0.16025 0.784225
1 2 3 4
Weights -0.122421 -0.000369 0.185141 0.687303
1 2 3 4
Weights -0.168464 -0.348929 0.050365 1.089427
1 2 3 4
Weights -0.231676 -0.117745 0.126843 0.857563
1 2 3 4
Weights -0.245466 -0.171538 0.063475 1.037862
1 2 3 4
Weights -0.174028 -0.084166 0.163758 0.712167
1 2 3 4
Weights -0.277844 -0.080601 0.270468 0.764049
1 2 3 4
Weights 0.070168 0.096101 0.144499 0.514427
1 2 3 4
Weights -0.101211 -0.172066 -0.010614 0.862325
1 2 3 4
Weights -0.073797 -0.004754 0.172351 0.662351
1 2 3 4
Weights -0.114979 0.028982 0.166927 0.618614
1 2 3 4
Weights -0.179683 -0.143071 0.087415 0.894218
1 2 3 4
Weights -0.292457 -0.133754 0.160436 1.054596
1 2 3 4
Weights -0.207119 -0.115086 0.236164 0.781921
1 2 3 4
Weights -0.100535 -0.165968 0.029675 0.881478
1 2 3 4
Weights -0.187026 -0.260214 0.086997 0.950583
1 2 3 4
Weights -0.108241 -0.023965 0.14818 0.696352
1 2 3 4
Weights 0.079996 0.094779 0.17069 0.430699
1 2 3 4
Weights -0.176987 -0.044242 0.22283 0.693911
1 2 3 4
Weights -0.267929 -0.172454 0.254136 0.847072
1 2 3 4
Weights 0.068006 0.130448 0.215729 0.381469
1 2 3 4
Weights -0.103685 -0.306094 -0.092278 1.112194
1 2 3 4
Weights -0.230168 -0.090312 0.135399 0.905872
1 2 3 4
Weights -0.273369 -0.129173 0.186368 0.986877
1 2 3 4
Weights -0.143564 -0.234061 -0.058764 1.002163
1 2 3 4
Weights -0.031521 0.068347 0.168068 0.43806
1 2 3 4
Weights -0.29965 -0.100404 0.207908 0.924171
1 2 3 4
Weights -0.081905 0.020709 0.18118 0.452365
1 2 3 4
Weights -0.090936 -0.337873 -0.008563 1.108522
1 2 3 4
Weights -0.17631 -0.15038 0.052933 0.84519
1 2 3 4
Weights -0.197502 -0.054469 0.150557 0.744797
1 2 3 4
Weights -0.249607 -0.168915 0.202017 0.969411
1 2 3 4
Weights -0.186602 -0.250672 0.028347 1.024607
1 2 3 4
Weights -0.295408 -0.074059 0.254786 0.857769
1 2 3 4
Weights -0.278181 -0.255107 -0.046089 1.284861
1 2 3 4
Weights -0.140321 -0.302712 0.008007 1.050238
1 2 3 4
Weights -0.065541 -0.375644 -0.153023 1.201588
1 2 3 4
Weights -0.114604 -0.007087 0.106163 0.690392
1 2 3 4
Weights -0.221077 -0.069991 0.156811 0.761734
1 2 3 4
Weights -0.213202 -0.156383 0.106958 0.86607
1 2 3 4
Weights -0.231572 -0.225789 0.070145 1.009614
1 2 3 4
Weights -0.13163 -0.092241 0.122976 0.806514
1 2 3 4
Weights -0.193353 -0.015786 0.2325 0.894134
1 2 3 4
Weights -0.01773 0.082751 0.200099 0.618399
1 2 3 4
Weights -0.027261 0.001522 0.173655 0.485061
1 2 3 4
Weights -0.106252 -0.01588 0.08423 0.643436
1 2 3 4
Weights -0.174108 -0.180398 0.105813 0.862795
1 2 3 4
Weights -0.283139 -0.28447 0.171813 1.065259
1 2 3 4
Weights -0.260301 -0.436188 0.137377 1.317165
1 2 3 4
Weights -0.023855 0.061786 0.137051 0.349175
1 2 3 4
Weights -0.220799 -0.053203 0.162415 0.771043
1 2 3 4
Weights -0.29465 -0.253122 0.171223 1.107384
1 2 3 4
Weights -0.111048 0.005906 0.071667 0.804333
1 2 3 4
Weights -0.307648 -0.245705 0.164994 1.155758
1 2 3 4
Weights -0.222862 -0.148424 0.277245 0.69691
1 2 3 4
Weights -0.091749 -0.01778 0.165213 0.615907
1 2 3 4
Weights 0.090587 0.18969 0.179096 0.39931
1 2 3 4
Weights -0.11932 -0.09302 0.124438 0.779671
1 2 3 4
Weights -0.174862 -0.016487 0.258474 0.688288
1 2 3 4
Weights -0.316011 -0.084924 0.199031 0.828304
1 2 3 4
Weights -0.086361 -0.003804 0.108139 0.55755
1 2 3 4
Weights -0.065007 0.068935 0.111333 0.626496
1 2 3 4
Weights -0.224954 -0.282546 0.056688 1.043588
1 2 3 4
Weights -0.154891 -0.336349 0.045148 1.022637
1 2 3 4
Weights 0.056781 0.092981 0.180715 0.489109
1 2 3 4
Weights -0.268949 -0.367832 0.154487 1.173875
Predicting t+4...
t+1 t+2 t+3 t+4
R2 8.546575e-01 7.834157e-01 6.728013e-01 5.547190e-01
RMSE 5.557481e+01 6.828789e+01 8.448877e+01 9.921282e+01
MSE 3.088559e+03 4.663236e+03 7.138352e+03 9.843185e+03
MAE -1.662754e+10 -1.873527e+10 -2.213845e+10 -2.726315e+10
MAPE -1.662754e+12 -1.873527e+12 -2.213845e+12 -2.726315e+12
MPE 1.293655e+12 1.702967e+12 1.658134e+12 7.143199e+11
CPU times: user 1min 8s, sys: 1.75 s, total: 1min 10s
Wall time: 1min 12s
In [95]:
plot_diff_along_time(X_test, artb_preds_s)
In [96]:
plot_bispecific(X_test, baseline_preds, artb_preds, artb_preds_s, order, limit_t, j, s)
In [97]:
plot_qualitative_analysis(artb_preds, X_test, limit_t, order, subway_stations, del_hours)
In [98]:
plot_qualitative_analysis(artb_preds_s, X_test, limit_t, order, subway_stations, del_hours)
In [99]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 'None'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(artb_scores).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR per t + baseline")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="General baseline")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of full baseline and AR model, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);
In [115]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 's'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(artb_scores_s).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR per station per t + baseline")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="Baseline per station")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

# plt.ylim((0, 1000))
plt.legend(prop={'size': 20})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

Compute and Compare

In [114]:
fig, ax = plt.subplots(1, figsize=(16, 6))
ax.set_prop_cycle(color=['crimson', 'teal', 'b', 'darkgoldenrod'])

x = range(1, limit_t+1)
baseline_scores = df_baseline_scores.loc['RMSE'].values.repeat(limit_t).reshape(-1, limit_t).T
model_scores = np.vstack((np.array(artb_scores).T[1], np.array(artb_scores_s).T[1])).T

baselineObjects = plt.plot(x, baseline_scores, linewidth=3)
labels = ["General baseline", "Baseline per station", "AR per t + baseline", "AR per station per t + baseline"]

arlineObjects = plt.plot(x, model_scores, linewidth=3)

for i, m in zip(range(4), ['D', '*']):
    ax = plt.scatter(x, baseline_scores[:, i], marker=m, s=100)

for i, m in zip(range(2), ['D', '*']):
    ax = plt.scatter(x, model_scores[:, i], marker=m, s=100)

# plt.ylim((0, model_scores[:, 0].max()))
plt.legend(baselineObjects+arlineObjects, labels, prop={'size': 15})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

ARMA

In [102]:
p, q, limit_t = 4, 1, 4
In [103]:
%%time
arma_preds, arma_scores = arma_plot_results(None, p, q, limit_t)
Fitting t+1...
1 2 3 4
Weights -0.07191 -0.14902 0.289492 0.849837
1 2 3 4 5
Weights -0.126518 -0.085693 0.271438 0.826482 -0.26459
Predicting t+1...
Fitting t+2...
1 2 3 4
Weights -0.07191 -0.14902 0.289492 0.849837
1 2 3 4 5
Weights -0.071228 -0.148932 0.289309 0.849466 0.000025
Predicting t+2...
Fitting t+3...
1 2 3 4
Weights -0.07191 -0.14902 0.289492 0.849837
1 2 3 4 5
Weights -0.078192 -0.148929 0.290042 0.849845 -0.00447
Predicting t+3...
Fitting t+4...
1 2 3 4
Weights -0.07191 -0.14902 0.289492 0.849837
1 2 3 4 5
Weights -0.102054 -0.148023 0.288995 0.849119 -0.034765
Predicting t+4...
t+1 t+2 t+3 t+4
R2 0.957136 -9.028013e+04 -0.804462 0.504497
RMSE 75.463789 1.095198e+05 489.629763 256.576689
MSE 5694.783430 1.199458e+10 239737.305273 65831.597327
MAE 0.420615 5.728840e+02 2.722510 1.506659
MAPE 42.061516 5.728840e+04 272.251033 150.665925
MPE -27.609317 1.292698e+04 -79.646515 -114.905210
CPU times: user 21 s, sys: 16.2 s, total: 37.3 s
Wall time: 36 s
In [104]:
plot_diff_along_time(X_test, arma_preds)
In [105]:
%%time
arma_preds_s, arma_scores_s = arma_plot_results('s', p, q, limit_t)
Fitting t+1...
1 2 3 4
Weights -0.255585 -0.01136 0.313564 0.818924
1 2 3 4 5
Weights -0.301877 0.038714 0.316836 0.783817 -0.166358
1 2 3 4
Weights -0.055682 0.053627 0.145081 0.731377
1 2 3 4 5
Weights -0.148167 0.092167 0.162881 0.711256 -0.261379
1 2 3 4
Weights -0.086472 -0.290653 0.199221 1.072153
1 2 3 4 5
Weights -0.101818 -0.25437 0.175449 1.058314 -0.152445
1 2 3 4
Weights -0.074532 -0.256497 0.560001 0.652402
1 2 3 4 5
Weights -0.125158 -0.196642 0.544164 0.63041 -0.163545
1 2 3 4
Weights 0.04503 -0.041891 0.158835 0.788967
1 2 3 4 5
Weights -0.021898 -0.000869 0.162084 0.787943 -0.324685
1 2 3 4
Weights -0.071104 -0.204754 0.120356 1.038711
1 2 3 4 5
Weights -0.088034 -0.17991 0.097443 1.026794 -0.189187
1 2 3 4
Weights -0.08688 -0.142759 0.415225 0.736634
1 2 3 4 5
Weights -0.175706 -0.036626 0.409164 0.688959 -0.299288
1 2 3 4
Weights -0.185678 -0.066327 0.101592 1.049717
1 2 3 4 5
Weights -0.203836 -0.042561 0.102518 1.022755 -0.18841
1 2 3 4
Weights -0.129815 -0.097025 0.078189 1.056087
1 2 3 4 5
Weights -0.144522 -0.074469 0.071351 1.031934 -0.213955
1 2 3 4
Weights -0.021637 -0.243946 -0.082888 1.25544
1 2 3 4 5
Weights -0.01616 -0.241545 -0.098127 1.254794 -0.089729
1 2 3 4
Weights -0.158904 -0.222866 0.646848 0.591004
1 2 3 4 5
Weights -0.189703 -0.184517 0.63613 0.575962 -0.090738
1 2 3 4
Weights -0.041966 -0.098744 0.246228 0.797511
1 2 3 4 5
Weights -0.086647 -0.061302 0.236679 0.788158 -0.184932
1 2 3 4
Weights 0.302188 -0.254555 0.42137 0.454084
1 2 3 4 5
Weights 0.294826 -0.252478 0.423469 0.454519 -0.017234
1 2 3 4
Weights -0.033364 -0.239709 0.502055 0.69789
1 2 3 4 5
Weights -0.066307 -0.197416 0.487136 0.687737 -0.124604
1 2 3 4
Weights 0.015932 -0.166423 0.240362 0.848702
1 2 3 4 5
Weights -0.039973 -0.112987 0.21494 0.843746 -0.325048
1 2 3 4
Weights -0.061307 -0.197824 0.167681 0.994309
1 2 3 4 5
Weights -0.088042 -0.162499 0.135918 0.983722 -0.267892
1 2 3 4
Weights -0.080071 -0.155193 0.237343 0.907938
1 2 3 4 5
Weights -0.114298 -0.112325 0.217573 0.892631 -0.212397
1 2 3 4
Weights -0.054032 0.094842 0.238382 0.609204
1 2 3 4 5
Weights -0.183236 0.169464 0.266412 0.576515 -0.298617
1 2 3 4
Weights -0.136526 -0.028739 0.178496 0.911827
1 2 3 4 5
Weights -0.186213 0.02143 0.184476 0.877569 -0.285283
1 2 3 4
Weights 0.036465 -0.410082 0.254123 1.017688
1 2 3 4 5
Weights 0.028138 -0.379602 0.209243 1.020035 -0.179172
1 2 3 4
Weights -0.138808 -0.140558 0.222161 0.9519
1 2 3 4 5
Weights -0.163749 -0.104291 0.210498 0.931676 -0.15907
1 2 3 4
Weights -0.101423 -0.189763 0.264213 0.914141
1 2 3 4 5
Weights -0.144586 -0.136051 0.239759 0.893352 -0.238578
1 2 3 4
Weights -0.069466 -0.255923 0.13 1.076936
1 2 3 4 5
Weights -0.077615 -0.236729 0.109376 1.071513 -0.118975
1 2 3 4
Weights -0.185295 -0.016679 -0.074843 1.191167
1 2 3 4 5
Weights -0.184377 -0.017162 -0.071758 1.18343 -0.049251
1 2 3 4
Weights 0.009591 -0.300259 0.464256 0.756829
1 2 3 4 5
Weights -0.071167 -0.204229 0.430355 0.734438 -0.358732
1 2 3 4
Weights -0.004418 -0.162062 0.325373 0.773807
1 2 3 4 5
Weights -0.111359 -0.061285 0.315127 0.742014 -0.431094
1 2 3 4
Weights -0.253484 0.068496 0.305645 0.809106
1 2 3 4 5
Weights -0.312957 0.141245 0.322171 0.75792 -0.229806
1 2 3 4
Weights 0.051855 -0.234672 0.396711 0.712681
1 2 3 4 5
Weights -0.010917 -0.174475 0.372227 0.711132 -0.251978
1 2 3 4
Weights -0.06007 -0.049043 0.263158 0.724431
1 2 3 4 5
Weights -0.174819 0.026163 0.267766 0.693039 -0.329714
1 2 3 4
Weights 0.121114 -0.398591 0.344617 0.847636
1 2 3 4 5
Weights 0.09309 -0.370141 0.307855 0.853872 -0.213165
1 2 3 4
Weights -0.085934 -0.380161 0.471132 0.929467
1 2 3 4 5
Weights -0.1174 -0.30324 0.432684 0.906591 -0.211609
1 2 3 4
Weights -0.022663 -0.364238 0.241989 1.040159
1 2 3 4 5
Weights -0.033993 -0.335185 0.216516 1.032992 -0.127256
1 2 3 4
Weights -0.100184 -0.107912 0.18664 0.925849
1 2 3 4 5
Weights -0.165222 -0.046099 0.178626 0.888136 -0.371389
1 2 3 4
Weights -0.05212 -0.216472 0.344732 0.843072
1 2 3 4 5
Weights -0.106799 -0.147028 0.317357 0.821591 -0.267929
1 2 3 4
Weights 0.170159 0.113226 -0.024498 0.634672
1 2 3 4 5
Weights -0.011162 0.13328 0.001487 0.657869 -0.46071
1 2 3 4
Weights -0.030164 -0.123085 0.301298 0.791196
1 2 3 4 5
Weights -0.11788 -0.029435 0.29451 0.757886 -0.396482
1 2 3 4
Weights -0.051505 0.010284 0.142652 0.797204
1 2 3 4 5
Weights -0.131128 0.050075 0.153715 0.777071 -0.286909
1 2 3 4
Weights -0.095866 -0.111411 0.232374 0.843406
1 2 3 4 5
Weights -0.137109 -0.072757 0.219872 0.826912 -0.188433
1 2 3 4
Weights 0.039805 -0.386263 0.324459 0.959719
1 2 3 4 5
Weights 0.025487 -0.334746 0.260546 0.962572 -0.321916
1 2 3 4
Weights -0.035947 -0.371006 0.345183 0.990275
1 2 3 4 5
Weights -0.06441 -0.295837 0.283587 0.977841 -0.335637
1 2 3 4
Weights -0.000588 0.001096 0.316104 0.615533
1 2 3 4 5
Weights -0.121741 0.092762 0.332171 0.590638 -0.320664
1 2 3 4
Weights 0.117621 0.113051 0.004942 0.647582
1 2 3 4 5
Weights 0.139884 0.111791 0.000997 0.640647 0.045505
1 2 3 4
Weights -0.139885 0.094827 0.219763 0.740501
1 2 3 4 5
Weights -0.264386 0.190167 0.261423 0.675132 -0.375071
1 2 3 4
Weights -0.150142 -0.012396 0.204563 0.840102
1 2 3 4 5
Weights -0.209085 0.033593 0.208405 0.809642 -0.226244
1 2 3 4
Weights -0.131747 -0.110322 0.28954 0.867265
1 2 3 4 5
Weights -0.171315 -0.059317 0.27807 0.844343 -0.195438
1 2 3 4
Weights 0.055366 -0.559806 0.343571 1.066679
1 2 3 4 5
Weights 0.048991 -0.533014 0.317193 1.06378 -0.093523
1 2 3 4
Weights -0.04913 -0.06709 0.20988 0.805025
1 2 3 4 5
Weights -0.094842 -0.034816 0.204305 0.794602 -0.180364
1 2 3 4
Weights -0.064604 -0.19958 0.154244 1.043617
1 2 3 4 5
Weights -0.074129 -0.168421 0.129691 1.029981 -0.206699
1 2 3 4
Weights -0.172486 -0.207997 0.384411 0.899286
1 2 3 4 5
Weights -0.208378 -0.146748 0.365907 0.873522 -0.172927
1 2 3 4
Weights 0.280663 0.076554 0.075619 0.503552
1 2 3 4 5
Weights 0.11138 0.123419 0.106139 0.533512 -0.393033
1 2 3 4
Weights -0.023608 -0.217333 0.123052 1.039497
1 2 3 4 5
Weights -0.035244 -0.194323 0.071443 1.043939 -0.34466
1 2 3 4
Weights -0.060199 -0.109318 0.389158 0.671495
1 2 3 4 5
Weights -0.14224 -0.037431 0.381078 0.648124 -0.236396
1 2 3 4
Weights -0.103878 -0.049245 0.226935 0.853896
1 2 3 4 5
Weights -0.125749 -0.025098 0.222178 0.844242 -0.111936
1 2 3 4
Weights 0.064955 -0.32729 0.273072 0.924667
1 2 3 4 5
Weights 0.035944 -0.283794 0.198111 0.945072 -0.418853
1 2 3 4
Weights -0.217135 0.108121 0.38163 0.611587
1 2 3 4 5
Weights -0.286519 0.167074 0.392045 0.579854 -0.17187
1 2 3 4
Weights 0.046668 0.254702 -0.008694 0.567613
1 2 3 4 5
Weights -0.098996 0.276949 0.037155 0.556115 -0.286591
1 2 3 4
Weights -0.144928 -0.148965 0.15578 1.064264
1 2 3 4 5
Weights -0.159042 -0.114213 0.139647 1.044414 -0.211847
1 2 3 4
Weights 0.102255 -0.094667 0.168106 0.722174
1 2 3 4 5
Weights 0.033049 -0.070672 0.160125 0.733507 -0.209061
1 2 3 4
Weights 0.007507 -0.309365 0.068561 1.122165
1 2 3 4 5
Weights 0.007798 -0.311008 0.071637 1.122009 0.01197
1 2 3 4
Weights -0.056488 -0.093323 0.301315 0.759553
1 2 3 4 5
Weights -0.116651 -0.040439 0.291939 0.744091 -0.214249
1 2 3 4
Weights -0.088228 -0.108627 0.095778 1.018565
1 2 3 4 5
Weights -0.121651 -0.075809 0.077814 0.994069 -0.414774
1 2 3 4
Weights 0.118567 0.115323 0.059806 0.608699
1 2 3 4 5
Weights -0.023751 0.151929 0.093772 0.610235 -0.32273
1 2 3 4
Weights 0.202978 -0.116824 0.268232 0.583766
1 2 3 4 5
Weights 0.079411 -0.06339 0.277869 0.600276 -0.338921
1 2 3 4
Weights -0.207456 0.030362 0.349792 0.726016
1 2 3 4 5
Weights -0.307303 0.128854 0.366516 0.66717 -0.290452
1 2 3 4
Weights 0.107526 0.034438 -0.100424 0.865525
1 2 3 4 5
Weights 0.068855 0.021511 -0.100378 0.884912 -0.174664
1 2 3 4
Weights -0.06008 -0.23499 0.064896 1.127952
1 2 3 4 5
Weights -0.064435 -0.215362 0.035666 1.119952 -0.203583
1 2 3 4
Weights -0.214086 -0.064483 0.353301 0.786902
1 2 3 4 5
Weights -0.260948 -0.012508 0.347978 0.758298 -0.156213
1 2 3 4
Weights -0.11463 -0.124054 -0.082009 1.240807
1 2 3 4 5
Weights -0.099832 -0.12646 -0.09644 1.222337 -0.291994
1 2 3 4
Weights 0.051376 0.014064 0.042212 0.812644
1 2 3 4 5
Weights -0.04407 0.031978 0.050558 0.81951 -0.403153
1 2 3 4
Weights -0.014391 -0.274008 0.012119 1.158152
1 2 3 4 5
Weights -0.015282 -0.267473 -0.001326 1.156088 -0.076314
1 2 3 4
Weights -0.148375 0.06181 0.358565 0.615264
1 2 3 4 5
Weights -0.235034 0.12866 0.368113 0.586152 -0.210947
1 2 3 4
Weights -0.194388 -0.182617 0.506295 0.773552
1 2 3 4 5
Weights -0.275821 -0.064315 0.490328 0.718733 -0.272777
1 2 3 4
Weights -0.099093 -0.024259 -0.100024 1.13517
1 2 3 4 5
Weights -0.0923 -0.041444 -0.104355 1.122357 -0.253193
1 2 3 4
Weights -0.140298 -0.083588 0.348142 0.721826
1 2 3 4 5
Weights -0.209157 -0.02887 0.340413 0.697474 -0.190292
1 2 3 4
Weights 0.000159 0.000057 0.321995 0.602528
1 2 3 4 5
Weights -0.070131 0.050836 0.327343 0.594814 -0.182876
1 2 3 4
Weights 0.313583 0.032354 0.036196 0.393361
1 2 3 4 5
Weights 0.113126 0.057807 0.048988 0.41895 -0.312284
1 2 3 4
Weights -0.033068 -0.340202 0.600944 0.706937
1 2 3 4 5
Weights -0.134675 -0.202724 0.587612 0.644757 -0.384071
1 2 3 4
Weights -0.231421 -0.092669 0.371942 0.87992
1 2 3 4 5
Weights -0.277685 -0.020043 0.366602 0.84207 -0.20418
1 2 3 4
Weights -0.170143 -0.145339 0.233367 0.976749
1 2 3 4 5
Weights -0.194309 -0.10685 0.224027 0.953861 -0.149063
1 2 3 4
Weights -0.010278 -0.010987 0.273435 0.612782
1 2 3 4 5
Weights -0.098292 0.032306 0.277483 0.603154 -0.194858
1 2 3 4
Weights -0.156512 -0.09238 0.328982 0.820704
1 2 3 4 5
Weights -0.191284 -0.049063 0.321607 0.79982 -0.143397
1 2 3 4
Weights 0.004969 -0.458166 0.406672 0.938217
1 2 3 4 5
Weights -0.010787 -0.422388 0.376605 0.931806 -0.126202
1 2 3 4
Weights -0.08685 -0.041725 -0.073327 1.100163
1 2 3 4 5
Weights -0.088092 -0.054659 -0.081786 1.08816 -0.277469
1 2 3 4
Weights 0.082665 -0.067993 -0.019753 0.882192
1 2 3 4 5
Weights 0.064912 -0.069323 -0.023798 0.887023 -0.081597
1 2 3 4
Weights -0.173816 -0.125638 0.145103 1.053924
1 2 3 4 5
Weights -0.199723 -0.083927 0.137372 1.022166 -0.230817
1 2 3 4
Weights -0.034862 -0.347491 0.267008 0.990461
1 2 3 4 5
Weights -0.042277 -0.332144 0.251884 0.98857 -0.06227
1 2 3 4
Weights -0.202948 -0.05138 0.256593 0.906465
1 2 3 4 5
Weights -0.264864 0.027147 0.262748 0.851514 -0.290138
1 2 3 4
Weights -0.111722 -0.078554 0.276436 0.806268
1 2 3 4 5
Weights -0.169122 -0.025327 0.270185 0.781755 -0.212429
1 2 3 4
Weights -0.127425 -0.2547 0.324727 0.949023
1 2 3 4 5
Weights -0.151548 -0.211296 0.305643 0.931869 -0.138024
1 2 3 4
Weights -0.036441 -0.147237 0.376229 0.700271
1 2 3 4 5
Weights -0.087775 -0.102807 0.363928 0.69057 -0.160164
1 2 3 4
Weights 0.001857 -0.369751 0.133213 1.143233
1 2 3 4 5
Weights 0.000791 -0.342585 0.091556 1.143645 -0.1658
1 2 3 4
Weights 0.010257 -0.499282 0.366416 1.01717
1 2 3 4 5
Weights -0.000346 -0.469832 0.343926 1.010478 -0.089711
1 2 3 4
Weights -0.174195 -0.109095 0.2964 0.859678
1 2 3 4 5
Weights -0.216182 -0.06068 0.2864 0.835027 -0.17356
1 2 3 4
Weights -0.05009 -0.178747 0.314264 0.865257
1 2 3 4 5
Weights -0.0997 -0.107937 0.289826 0.845626 -0.297383
1 2 3 4
Weights -0.068862 -0.284481 0.388201 0.842867
1 2 3 4 5
Weights -0.100821 -0.239343 0.363614 0.829674 -0.151099
1 2 3 4
Weights -0.047376 -0.222041 0.304954 0.869161
1 2 3 4 5
Weights -0.088576 -0.168726 0.276596 0.855116 -0.233135
1 2 3 4
Weights -0.087775 -0.256047 0.165117 1.068152
1 2 3 4 5
Weights -0.098229 -0.232327 0.149931 1.057123 -0.111114
1 2 3 4
Weights -0.146824 -0.275863 0.269877 1.048894
1 2 3 4 5
Weights -0.174736 -0.219821 0.25416 1.020522 -0.161178
1 2 3 4
Weights -0.060379 -0.281576 0.46896 0.789163
1 2 3 4 5
Weights -0.135944 -0.177433 0.437526 0.753693 -0.325131
1 2 3 4
Weights -0.027073 -0.477138 0.552234 0.857717
1 2 3 4 5
Weights -0.063079 -0.402698 0.505652 0.839474 -0.236913
1 2 3 4
Weights -0.031205 -0.31744 0.479283 0.786598
1 2 3 4 5
Weights -0.07572 -0.249119 0.444762 0.770603 -0.224578
1 2 3 4
Weights 0.019677 -0.192076 0.465378 0.644047
1 2 3 4 5
Weights -0.060653 -0.10718 0.452446 0.627114 -0.274191
1 2 3 4
Weights -0.175035 -0.047698 0.165238 0.949378
1 2 3 4 5
Weights -0.208685 -0.012903 0.166017 0.920318 -0.198538
1 2 3 4
Weights -0.013304 -0.260061 0.275381 0.953007
1 2 3 4 5
Weights -0.042667 -0.193715 0.219253 0.948942 -0.385864
1 2 3 4
Weights -0.060145 -0.017365 0.121062 0.834593
1 2 3 4 5
Weights -0.124713 0.013327 0.124578 0.818412 -0.267616
1 2 3 4
Weights -0.11589 -0.02772 0.070577 0.980401
1 2 3 4 5
Weights -0.13909 -0.010297 0.072291 0.957823 -0.208609
1 2 3 4
Weights -0.137855 0.019047 0.257351 0.773571
1 2 3 4 5
Weights -0.195531 0.072371 0.261851 0.745559 -0.210462
1 2 3 4
Weights -0.14242 -0.224934 0.231638 1.035725
1 2 3 4 5
Weights -0.160049 -0.188883 0.218489 1.016908 -0.127598
1 2 3 4
Weights -0.096865 -0.089163 0.420466 0.653252
1 2 3 4 5
Weights -0.197098 0.004966 0.416073 0.615962 -0.272544
1 2 3 4
Weights -0.216429 -0.066451 0.32581 0.825182
1 2 3 4 5
Weights -0.299636 0.026018 0.327253 0.767929 -0.284698
1 2 3 4
Weights -0.127089 0.076313 0.103513 0.835885
1 2 3 4 5
Weights -0.209253 0.114081 0.134041 0.794134 -0.310603
1 2 3 4
Weights -0.011454 -0.396302 0.542473 0.810253
1 2 3 4 5
Weights -0.076402 -0.280653 0.485947 0.784021 -0.410057
1 2 3 4
Weights -0.02051 -0.120254 0.443025 0.604001
1 2 3 4 5
Weights -0.088819 -0.060495 0.436094 0.589792 -0.186751
1 2 3 4
Weights -0.0634 -0.119702 0.168568 0.880941
1 2 3 4 5
Weights -0.098992 -0.094451 0.155964 0.869946 -0.170758
1 2 3 4
Weights -0.207299 0.008564 0.188248 0.930583
1 2 3 4 5
Weights -0.264554 0.067455 0.209352 0.877774 -0.305141
1 2 3 4
Weights -0.08164 -0.116078 0.343197 0.786344
1 2 3 4 5
Weights -0.128045 -0.061577 0.330564 0.769107 -0.190372
1 2 3 4
Weights -0.075242 -0.131763 0.221482 0.890717
1 2 3 4 5
Weights -0.112702 -0.092114 0.205225 0.876551 -0.210687
1 2 3 4
Weights -0.068986 -0.028425 0.320641 0.627344
1 2 3 4 5
Weights -0.137444 0.012469 0.319327 0.614476 -0.158228
1 2 3 4
Weights -0.060834 -0.215924 0.224918 0.916893
1 2 3 4 5
Weights -0.0936 -0.181596 0.200921 0.906305 -0.181028
1 2 3 4
Weights 0.091637 -0.310991 0.271896 0.852402
1 2 3 4 5
Weights 0.016538 -0.277238 0.20586 0.878844 -0.519447
1 2 3 4
Weights -0.210401 0.005046 0.215832 0.866152
1 2 3 4 5
Weights -0.252854 0.045103 0.223475 0.831913 -0.176828
1 2 3 4
Weights -0.017376 -0.19665 0.259883 0.842372
1 2 3 4 5
Weights -0.079256 -0.14604 0.234361 0.830515 -0.280793
1 2 3 4
Weights -0.054596 -0.365346 0.436941 0.896725
1 2 3 4 5
Weights -0.095251 -0.285086 0.388619 0.877212 -0.271716
1 2 3 4
Weights 0.080526 0.15308 0.030096 0.690539
1 2 3 4 5
Weights -0.073887 0.213703 0.094668 0.665557 -0.519535
1 2 3 4
Weights -0.114153 -0.132032 0.283141 0.876998
1 2 3 4 5
Weights -0.146018 -0.088636 0.271215 0.857721 -0.166485
1 2 3 4
Weights -0.054835 -0.059695 0.227531 0.839687
1 2 3 4 5
Weights -0.114243 0.001293 0.226361 0.815386 -0.321882
1 2 3 4
Weights -0.07124 -0.243637 0.520031 0.724902
1 2 3 4 5
Weights -0.132066 -0.158714 0.498431 0.696671 -0.232488
1 2 3 4
Weights -0.054432 -0.368236 0.621986 0.736435
1 2 3 4 5
Weights -0.136295 -0.235622 0.577018 0.696266 -0.387016
1 2 3 4
Weights -0.018126 -0.327098 0.336516 0.909628
1 2 3 4 5
Weights -0.062532 -0.261924 0.292411 0.893743 -0.29774
1 2 3 4
Weights 0.193591 0.182285 0.049013 0.493012
1 2 3 4 5
Weights 0.012779 0.228757 0.096549 0.507336 -0.365907
1 2 3 4
Weights -0.144758 -0.147211 0.23333 0.955593
1 2 3 4 5
Weights -0.176022 -0.102514 0.218529 0.932459 -0.198111
1 2 3 4
Weights -0.067341 -0.261541 0.271173 0.983236
1 2 3 4 5
Weights -0.114845 -0.185086 0.225919 0.960287 -0.453527
1 2 3 4
Weights -0.16143 0.020023 0.100289 0.952162
1 2 3 4 5
Weights -0.193717 0.042929 0.112049 0.923385 -0.224753
1 2 3 4
Weights 0.097577 -0.406603 0.420521 0.816355
1 2 3 4 5
Weights 0.06851 -0.36548 0.380362 0.819692 -0.232448
1 2 3 4
Weights -0.129531 -0.047099 0.209134 0.929257
1 2 3 4 5
Weights -0.144666 -0.02058 0.205122 0.915215 -0.131043
1 2 3 4
Weights -0.103135 -0.349689 0.353397 0.982907
1 2 3 4 5
Weights -0.114607 -0.325225 0.340651 0.97468 -0.067989
1 2 3 4
Weights -0.024491 -0.172015 0.233097 0.917904
1 2 3 4 5
Weights -0.065098 -0.113458 0.199955 0.907898 -0.373813
1 2 3 4
Weights 0.024492 -0.06428 0.278425 0.669044
1 2 3 4 5
Weights -0.026893 -0.030616 0.276606 0.667363 -0.15056
1 2 3 4
Weights 0.015033 -0.168414 0.214371 0.868737
1 2 3 4 5
Weights -0.013237 -0.141018 0.195041 0.868445 -0.178401
1 2 3 4
Weights -0.118449 -0.236705 0.163722 1.085047
1 2 3 4 5
Weights -0.133202 -0.204319 0.148509 1.067542 -0.142773
1 2 3 4
Weights -0.13618 -0.043511 0.238105 0.868153
1 2 3 4 5
Weights -0.219516 0.044041 0.247933 0.814857 -0.373708
1 2 3 4
Weights -0.123087 -0.172625 0.298519 0.880914
1 2 3 4 5
Weights -0.182347 -0.101257 0.280779 0.845804 -0.264684
1 2 3 4
Weights -0.135197 -0.166029 0.241105 0.954836
1 2 3 4 5
Weights -0.153335 -0.136388 0.228901 0.940626 -0.122041
1 2 3 4
Weights 0.005223 -0.168892 0.367337 0.683269
1 2 3 4 5
Weights -0.08949 -0.097093 0.352079 0.668349 -0.291415
1 2 3 4
Weights -0.093757 -0.061781 0.219476 0.841735
1 2 3 4 5
Weights -0.118927 -0.039118 0.213485 0.833819 -0.115893
1 2 3 4
Weights -0.0473 -0.257459 0.333606 0.907321
1 2 3 4 5
Weights -0.071493 -0.210195 0.305072 0.896519 -0.183431
1 2 3 4
Weights -0.18871 0.051217 -0.058647 1.107192
1 2 3 4 5
Weights -0.194233 0.043488 -0.03666 1.074668 -0.23938
1 2 3 4
Weights -0.012666 -0.372996 0.494787 0.807934
1 2 3 4 5
Weights -0.073681 -0.281867 0.446792 0.787198 -0.332117
1 2 3 4
Weights -0.198087 -0.100607 0.285524 0.922427
1 2 3 4 5
Weights -0.239419 -0.039249 0.280302 0.884986 -0.209922
1 2 3 4
Weights -0.037168 -0.240259 0.19016 0.998595
1 2 3 4 5
Weights -0.053943 -0.208931 0.158119 0.99257 -0.206322
1 2 3 4
Weights 0.10371 -0.262809 0.371145 0.678856
1 2 3 4 5
Weights -0.024075 -0.196215 0.347114 0.680581 -0.424357
1 2 3 4
Weights -0.15825 -0.076183 0.298341 0.855888
1 2 3 4 5
Weights -0.225989 0.006185 0.295925 0.812036 -0.296611
1 2 3 4
Weights -0.17039 -0.017288 0.305664 0.785368
1 2 3 4 5
Weights -0.251382 0.063054 0.311246 0.740648 -0.274216
1 2 3 4
Weights -0.032295 -0.09653 0.340541 0.677644
1 2 3 4 5
Weights -0.081 -0.058328 0.333305 0.669993 -0.145802
1 2 3 4
Weights -0.012808 -0.052476 0.28308 0.717141
1 2 3 4 5
Weights -0.09575 0.015139 0.28314 0.700412 -0.278011
1 2 3 4
Weights -0.128861 -0.215211 0.118351 1.113698
1 2 3 4 5
Weights -0.132526 -0.207119 0.115093 1.108556 -0.03624
1 2 3 4
Weights -0.098565 -0.06401 0.12425 0.919392
1 2 3 4 5
Weights -0.127885 -0.04302 0.119296 0.904562 -0.164069
1 2 3 4
Weights -0.14249 0.102455 0.066075 0.912533
1 2 3 4 5
Weights -0.18552 0.127406 0.098108 0.872425 -0.272238
1 2 3 4
Weights 0.065655 -0.285342 0.304309 0.865765
1 2 3 4 5
Weights 0.01422 -0.223538 0.241663 0.879639 -0.490751
1 2 3 4
Weights -0.073317 -0.280684 0.32631 0.940181
1 2 3 4 5
Weights -0.100471 -0.22754 0.297136 0.92309 -0.192772
1 2 3 4
Weights -0.142418 0.038219 0.085639 0.943043
1 2 3 4 5
Weights -0.17652 0.064343 0.100019 0.911088 -0.249303
1 2 3 4
Weights -0.207308 -0.041345 0.335196 0.814874
1 2 3 4 5
Weights -0.253616 0.018141 0.332765 0.781537 -0.180428
1 2 3 4
Weights -0.028374 -0.226345 0.491865 0.692961
1 2 3 4 5
Weights -0.103015 -0.13896 0.466169 0.673135 -0.272784
1 2 3 4
Weights -0.080706 -0.158375 0.200527 0.960518
1 2 3 4 5
Weights -0.092906 -0.135022 0.181341 0.954007 -0.137526
1 2 3 4
Weights -0.016394 -0.124353 0.319456 0.769229
1 2 3 4 5
Weights -0.089118 -0.045946 0.307022 0.75084 -0.315729
1 2 3 4
Weights -0.133893 0.020416 0.360506 0.653255
1 2 3 4 5
Weights -0.241608 0.117564 0.372909 0.606858 -0.28482
1 2 3 4
Weights -0.140065 -0.140432 0.265255 0.950562
1 2 3 4 5
Weights -0.167071 -0.089281 0.249863 0.925639 -0.20643
1 2 3 4
Weights -0.104139 -0.107252 0.229987 0.880197
1 2 3 4 5
Weights -0.138237 -0.067565 0.220804 0.860218 -0.181886
1 2 3 4
Weights -0.195781 -0.069816 0.216669 0.94942
1 2 3 4 5
Weights -0.237918 -0.015202 0.217323 0.909887 -0.239841
1 2 3 4
Weights -0.214544 -0.128757 0.256784 0.997251
1 2 3 4 5
Weights -0.244887 -0.074577 0.252364 0.961845 -0.173852
1 2 3 4
Weights -0.16037 -0.11492 0.153685 1.023577
1 2 3 4 5
Weights -0.182495 -0.08006 0.147496 0.996427 -0.192115
1 2 3 4
Weights -0.221777 -0.163819 0.326647 0.960096
1 2 3 4 5
Weights -0.235971 -0.137149 0.322085 0.945052 -0.068959
1 2 3 4
Weights -0.089778 -0.135533 0.201065 0.905062
1 2 3 4 5
Weights -0.143059 -0.088748 0.183811 0.883434 -0.275839
1 2 3 4
Weights -0.125584 -0.181293 0.013866 1.213356
1 2 3 4 5
Weights -0.125678 -0.180626 0.013503 1.212794 -0.005026
1 2 3 4
Weights -0.019365 -0.11979 0.245406 0.80118
1 2 3 4 5
Weights -0.093524 -0.05579 0.225128 0.783937 -0.333085
1 2 3 4
Weights -0.163535 -0.143294 0.282095 0.937828
1 2 3 4 5
Weights -0.196565 -0.087471 0.271673 0.905636 -0.189974
1 2 3 4
Weights -0.0407 -0.240993 0.326819 0.900722
1 2 3 4 5
Weights -0.077751 -0.173822 0.296108 0.880421 -0.268835
1 2 3 4
Weights -0.202487 -0.193891 0.381398 0.901241
1 2 3 4 5
Weights -0.234098 -0.141523 0.368309 0.876349 -0.137877
1 2 3 4
Weights -0.062713 0.278859 -0.538007 1.160583
1 2 3 4 5
Weights -0.062536 0.285656 -0.541324 1.161185 0.017017
1 2 3 4
Weights -0.208089 -0.135435 0.45353 0.748619
1 2 3 4 5
Weights -0.233003 -0.102373 0.444503 0.734154 -0.083546
1 2 3 4
Weights 0.099728 -0.37421 0.63946 0.561694
1 2 3 4 5
Weights -0.005547 -0.287306 0.632238 0.531038 -0.400664
1 2 3 4
Weights -0.147911 -0.223315 0.328781 0.924289
1 2 3 4 5
Weights -0.165572 -0.193027 0.317244 0.910883 -0.090045
1 2 3 4
Weights -0.064857 -0.057518 0.321597 0.737255
1 2 3 4 5
Weights -0.155768 0.032531 0.323779 0.702606 -0.317004
1 2 3 4
Weights 0.16209 0.031063 -0.004958 0.53419
1 2 3 4 5
Weights 0.250703 0.026634 -0.010311 0.524714 0.140258
1 2 3 4
Weights -0.007037 -0.284018 0.384895 0.79564
1 2 3 4 5
Weights -0.076937 -0.213652 0.348801 0.781095 -0.29758
1 2 3 4
Weights -0.048177 -0.322875 0.104892 1.212352
1 2 3 4 5
Weights -0.03563 -0.287493 0.037376 1.218294 -0.307279
1 2 3 4
Weights -0.107267 -0.119918 0.258366 0.878488
1 2 3 4 5
Weights -0.150446 -0.067493 0.245348 0.85504 -0.228781
1 2 3 4
Weights -0.037997 -0.35202 0.466327 0.823829
1 2 3 4 5
Weights -0.072297 -0.297059 0.432882 0.811775 -0.179539
1 2 3 4
Weights -0.030031 -0.004559 0.05903 0.874914
1 2 3 4 5
Weights -0.101525 0.013825 0.064765 0.863674 -0.347442
1 2 3 4
Weights -0.228435 -0.063043 0.233921 0.967644
1 2 3 4 5
Weights -0.263651 -0.010668 0.237423 0.927277 -0.201824
1 2 3 4
Weights -0.117999 -0.124718 0.246435 0.950663
1 2 3 4 5
Weights -0.140802 -0.078861 0.231191 0.931258 -0.20645
1 2 3 4
Weights -0.093605 -0.006906 0.102649 0.876906
1 2 3 4 5
Weights -0.140468 0.017839 0.107705 0.857003 -0.237419
1 2 3 4
Weights -0.129545 -0.023044 0.18386 0.844481
1 2 3 4 5
Weights -0.16481 0.00337 0.182817 0.828328 -0.143651
1 2 3 4
Weights -0.092807 -0.131776 0.282139 0.89545
1 2 3 4 5
Weights -0.135528 -0.062109 0.26796 0.864955 -0.276032
1 2 3 4
Weights -0.009498 -0.311705 0.412706 0.824386
1 2 3 4 5
Weights -0.058379 -0.241062 0.371519 0.812219 -0.272322
1 2 3 4
Weights -0.128412 -0.118041 0.323202 0.825309
1 2 3 4 5
Weights -0.207218 -0.027601 0.314341 0.78039 -0.310403
1 2 3 4
Weights -0.161916 -0.163053 0.330386 0.87085
1 2 3 4 5
Weights -0.19754 -0.113694 0.317942 0.845986 -0.153035
1 2 3 4
Weights 0.010511 0.052463 0.053912 0.704616
1 2 3 4 5
Weights -0.022774 0.060096 0.05723 0.704266 -0.08847
1 2 3 4
Weights -0.126994 -0.19504 0.219551 1.019476
1 2 3 4 5
Weights -0.140732 -0.163204 0.20324 1.005652 -0.137548
1 2 3 4
Weights -0.115298 -0.20497 0.425823 0.816829
1 2 3 4 5
Weights -0.14926 -0.147692 0.407938 0.794936 -0.15755
1 2 3 4
Weights 0.002651 -0.348249 0.358977 0.939376
1 2 3 4 5
Weights -0.042838 -0.261083 0.292114 0.928168 -0.57994
1 2 3 4
Weights -0.108273 -0.129958 0.033109 1.106449
1 2 3 4 5
Weights -0.114576 -0.117131 0.021328 1.089729 -0.201836
1 2 3 4
Weights -0.044944 -0.26287 0.476066 0.758691
1 2 3 4 5
Weights -0.122103 -0.164395 0.440213 0.735713 -0.323515
1 2 3 4
Weights -0.013013 -0.317016 0.426181 0.842591
1 2 3 4 5
Weights -0.096745 -0.205417 0.373046 0.822236 -0.496099
1 2 3 4
Weights -0.159968 -0.172638 0.334498 0.881469
1 2 3 4 5
Weights -0.187016 -0.131905 0.322245 0.862366 -0.125226
1 2 3 4
Weights -0.037341 0.013818 0.223824 0.635338
1 2 3 4 5
Weights -0.096113 0.037445 0.225901 0.629648 -0.13005
1 2 3 4
Weights 0.100711 -0.191716 0.329805 0.692442
1 2 3 4 5
Weights -0.033844 -0.102611 0.3252 0.680022 -0.4856
1 2 3 4
Weights -0.208243 -0.148594 0.549474 0.67443
1 2 3 4 5
Weights -0.273312 -0.063099 0.535715 0.635273 -0.188388
1 2 3 4
Weights -0.141898 -0.110924 0.232945 0.927262
1 2 3 4 5
Weights -0.18603 -0.05347 0.222066 0.894602 -0.251166
1 2 3 4
Weights 0.381675 0.062528 0.063654 0.421444
1 2 3 4 5
Weights 0.221481 0.103526 0.095858 0.455116 -0.370137
1 2 3 4
Weights 0.029428 -0.45095 0.512332 0.853102
1 2 3 4 5
Weights -0.021864 -0.351155 0.423659 0.853431 -0.509783
1 2 3 4
Weights -0.032708 -0.262768 0.405787 0.806008
1 2 3 4 5
Weights -0.127088 -0.154641 0.373647 0.771675 -0.413328
1 2 3 4
Weights -0.083386 -0.227836 0.200279 1.03655
1 2 3 4 5
Weights -0.107185 -0.173542 0.16386 1.015572 -0.315065
1 2 3 4
Weights 0.015592 -0.129297 0.162262 0.860778
1 2 3 4 5
Weights -0.020101 -0.10635 0.147284 0.86088 -0.200246
1 2 3 4
Weights 0.025628 -0.33195 0.415192 0.805635
1 2 3 4 5
Weights -0.02599 -0.264897 0.369672 0.800407 -0.300724
1 2 3 4
Weights 0.053593 0.12952 0.066834 0.606904
1 2 3 4 5
Weights -0.056936 0.151213 0.089888 0.605849 -0.222643
1 2 3 4
Weights -0.035522 -0.112468 0.319545 0.772909
1 2 3 4 5
Weights -0.097952 -0.045349 0.31152 0.752015 -0.275696
1 2 3 4
Weights -0.090998 -0.277814 0.439076 0.858399
1 2 3 4 5
Weights -0.171342 -0.155543 0.40394 0.814422 -0.391792
1 2 3 4
Weights -0.178236 -0.155317 0.165772 1.076691
1 2 3 4 5
Weights -0.19311 -0.124633 0.158788 1.055746 -0.142165
1 2 3 4
Weights -0.03619 -0.406361 0.481499 0.863265
1 2 3 4 5
Weights -0.091179 -0.314533 0.434125 0.836004 -0.31418
1 2 3 4
Weights -0.133475 -0.06212 0.189467 0.9403
1 2 3 4 5
Weights -0.170722 -0.014776 0.185342 0.910693 -0.262391
1 2 3 4
Weights -0.172166 -0.05671 0.170552 0.962851
1 2 3 4 5
Weights -0.20705 -0.01566 0.170345 0.930186 -0.222072
1 2 3 4
Weights -0.00556 -0.395592 0.505087 0.830839
1 2 3 4 5
Weights -0.069406 -0.289461 0.456882 0.800439 -0.388995
1 2 3 4
Weights -0.000324 0.01582 0.15117 0.694887
1 2 3 4 5
Weights -0.017333 0.021862 0.152139 0.694955 -0.04515
1 2 3 4
Weights 0.128607 -0.114295 0.245675 0.676194
1 2 3 4 5
Weights 0.030648 -0.063611 0.243191 0.685224 -0.311514
1 2 3 4
Weights -0.137674 -0.072332 0.140423 0.960372
1 2 3 4 5
Weights -0.172059 -0.039635 0.134399 0.936155 -0.229107
1 2 3 4
Weights 0.024927 -0.328772 0.376807 0.868938
1 2 3 4 5
Weights -0.047422 -0.23602 0.322964 0.853647 -0.541273
1 2 3 4
Weights -0.185035 -0.036549 0.217049 0.916408
1 2 3 4 5
Weights -0.246034 0.032886 0.224032 0.86669 -0.302187
1 2 3 4
Weights -0.243279 -0.045304 0.310472 0.889702
1 2 3 4 5
Weights -0.281751 0.008658 0.31161 0.854563 -0.173207
1 2 3 4
Weights -0.022376 -0.132982 0.280192 0.818175
1 2 3 4 5
Weights -0.07523 -0.076477 0.265142 0.8048 -0.254107
1 2 3 4
Weights -0.182001 -0.180653 0.320769 0.925469
1 2 3 4 5
Weights -0.216938 -0.126855 0.306197 0.898908 -0.169653
1 2 3 4
Weights -0.069672 -0.105255 0.324256 0.781141
1 2 3 4 5
Weights -0.144196 -0.026331 0.315724 0.753054 -0.291228
1 2 3 4
Weights -0.016895 -0.23254 0.153256 0.958345
1 2 3 4 5
Weights -0.027297 -0.222592 0.139052 0.958674 -0.074653
1 2 3 4
Weights 0.022481 -0.131577 0.21244 0.760523
1 2 3 4 5
Weights -0.069059 -0.089291 0.196243 0.759121 -0.302715
1 2 3 4
Weights -0.248839 -0.142512 0.462355 0.841014
1 2 3 4 5
Weights -0.30113 -0.056752 0.451265 0.799995 -0.195656
1 2 3 4
Weights 0.004716 -0.273489 0.594216 0.588842
1 2 3 4 5
Weights -0.093226 -0.175366 0.572276 0.561965 -0.316047
1 2 3 4
Weights -0.121799 0.023751 0.167091 0.856263
1 2 3 4 5
Weights -0.162647 0.056717 0.1756 0.833103 -0.195681
1 2 3 4
Weights -0.095559 -0.27724 0.229249 1.051423
1 2 3 4 5
Weights -0.10673 -0.247337 0.209443 1.041432 -0.115138
1 2 3 4
Weights -0.124513 -0.065585 0.129546 0.959842
1 2 3 4 5
Weights -0.147548 -0.043827 0.123799 0.944142 -0.170005
1 2 3 4
Weights -0.039269 -0.211149 0.208379 0.955195
1 2 3 4 5
Weights -0.085604 -0.15964 0.168447 0.94292 -0.387701
1 2 3 4
Weights -0.15437 -0.128932 0.363735 0.810107
1 2 3 4 5
Weights -0.205329 -0.06659 0.351627 0.781266 -0.189595
1 2 3 4
Weights -0.176825 0.003974 0.257393 0.82991
1 2 3 4 5
Weights -0.240745 0.071801 0.265549 0.786356 -0.258996
1 2 3 4
Weights 0.245072 -0.111408 0.129108 0.683725
1 2 3 4 5
Weights 0.133255 -0.087752 0.121056 0.729365 -0.422583
1 2 3 4
Weights -0.053653 -0.156746 0.180478 0.906685
1 2 3 4 5
Weights -0.123206 -0.107369 0.159914 0.882767 -0.361075
1 2 3 4
Weights -0.063273 -0.211343 0.440294 0.752009
1 2 3 4 5
Weights -0.099052 -0.164241 0.423635 0.73878 -0.141511
1 2 3 4
Weights -0.040329 -0.014287 0.176309 0.811266
1 2 3 4 5
Weights -0.120498 0.044174 0.186722 0.785418 -0.353181
1 2 3 4
Weights -0.049439 -0.244092 0.255596 0.968896
1 2 3 4 5
Weights -0.065716 -0.202837 0.223726 0.960269 -0.201199
1 2 3 4
Weights -0.032799 -0.357149 0.425715 0.911441
1 2 3 4 5
Weights -0.073303 -0.270744 0.383116 0.886201 -0.320964
1 2 3 4
Weights -0.195576 -0.099886 0.398822 0.794825
1 2 3 4 5
Weights -0.239855 -0.037715 0.38998 0.763972 -0.165905
1 2 3 4
Weights -0.037229 -0.371153 0.460452 0.836582
1 2 3 4 5
Weights -0.071011 -0.317262 0.433307 0.818602 -0.17001
1 2 3 4
Weights -0.187042 -0.108681 0.179253 1.012636
1 2 3 4 5
Weights -0.205605 -0.078573 0.175956 0.98897 -0.134698
1 2 3 4
Weights -0.068056 -0.07185 0.216421 0.842565
1 2 3 4 5
Weights -0.124585 -0.019001 0.207875 0.821345 -0.276688
1 2 3 4
Weights 0.04445 0.063898 0.055683 0.763813
1 2 3 4 5
Weights 0.024177 0.068577 0.060347 0.765694 -0.067348
1 2 3 4
Weights -0.180262 -0.021293 0.266451 0.829585
1 2 3 4 5
Weights -0.225133 0.0257 0.268049 0.800734 -0.186352
1 2 3 4
Weights -0.224734 -0.079961 0.329663 0.877935
1 2 3 4 5
Weights -0.268999 -0.017243 0.327164 0.839998 -0.192134
1 2 3 4
Weights -0.02929 -0.015031 0.279392 0.715913
1 2 3 4 5
Weights -0.083463 0.033797 0.282267 0.704928 -0.189765
1 2 3 4
Weights -0.026334 -0.363024 0.227977 1.061936
1 2 3 4 5
Weights -0.037023 -0.33153 0.200574 1.053697 -0.139055
1 2 3 4
Weights 0.002243 -0.027064 0.152631 0.79091
1 2 3 4 5
Weights -0.12767 0.039184 0.17017 0.763699 -0.49742
1 2 3 4
Weights 0.057708 -0.333153 0.392261 0.839584
1 2 3 4 5
Weights -0.008998 -0.245729 0.339785 0.834108 -0.575118
1 2 3 4
Weights -0.049498 -0.313568 0.177703 1.071589
1 2 3 4 5
Weights -0.052016 -0.306889 0.171665 1.069804 -0.029807
1 2 3 4
Weights -0.06031 -0.040706 0.315503 0.662942
1 2 3 4 5
Weights -0.116192 -0.001514 0.313628 0.65211 -0.149456
1 2 3 4
Weights -0.07099 -0.313046 0.515653 0.799314
1 2 3 4 5
Weights -0.150107 -0.190587 0.482859 0.75449 -0.356653
1 2 3 4
Weights -0.006761 0.031234 0.114174 0.710793
1 2 3 4 5
Weights -0.072836 0.048104 0.119557 0.706438 -0.181515
1 2 3 4
Weights -0.00267 -0.395135 0.335507 0.973531
1 2 3 4 5
Weights -0.030795 -0.330126 0.278162 0.962032 -0.304066
1 2 3 4
Weights -0.145198 -0.114883 0.197498 0.956512
1 2 3 4 5
Weights -0.167693 -0.084648 0.1885 0.938312 -0.148482
1 2 3 4
Weights -0.029364 -0.206871 0.378952 0.747648
1 2 3 4 5
Weights -0.112256 -0.131521 0.358582 0.725623 -0.288931
1 2 3 4
Weights -0.030138 -0.321158 0.504516 0.778814
1 2 3 4 5
Weights -0.119886 -0.19531 0.470134 0.736827 -0.410663
1 2 3 4
Weights -0.131325 -0.153309 0.135789 1.04814
1 2 3 4 5
Weights -0.148551 -0.12395 0.121619 1.029049 -0.187315
1 2 3 4
Weights -0.17053 -0.024801 0.203526 0.921763
1 2 3 4 5
Weights -0.21715 0.029926 0.209387 0.883788 -0.268613
1 2 3 4
Weights -0.078537 -0.147354 0.07966 1.073456
1 2 3 4 5
Weights -0.092604 -0.124458 0.045978 1.058441 -0.441629
1 2 3 4
Weights -0.035369 -0.413796 0.355409 0.991736
1 2 3 4 5
Weights -0.051628 -0.373133 0.326383 0.981791 -0.133078
1 2 3 4
Weights -0.072281 -0.208971 0.047599 1.136149
1 2 3 4 5
Weights -0.076562 -0.188313 0.022454 1.121958 -0.223574
1 2 3 4
Weights -0.144512 0.028178 0.14538 0.871143
1 2 3 4 5
Weights -0.187005 0.060758 0.153812 0.842738 -0.197569
1 2 3 4
Weights -0.142803 -0.1101 0.340716 0.799687
1 2 3 4 5
Weights -0.211001 -0.033996 0.331802 0.762887 -0.248164
1 2 3 4
Weights -0.124621 -0.258389 0.456455 0.807841
1 2 3 4 5
Weights -0.18885 -0.169137 0.435183 0.767721 -0.237965
1 2 3 4
Weights -0.124354 -0.224497 0.26651 0.990473
1 2 3 4 5
Weights -0.14547 -0.181688 0.247049 0.972214 -0.160744
1 2 3 4
Weights -0.190709 -0.005547 0.131807 0.982056
1 2 3 4 5
Weights -0.198403 0.005669 0.132351 0.968149 -0.096455
1 2 3 4
Weights -0.077748 0.023033 0.323605 0.705532
1 2 3 4 5
Weights -0.125805 0.074541 0.328114 0.688434 -0.165194
1 2 3 4
Weights -0.035876 -0.18452 0.314834 0.865413
1 2 3 4 5
Weights -0.086232 -0.107208 0.290257 0.845548 -0.322902
1 2 3 4
Weights -0.047262 -0.002229 0.226202 0.70838
1 2 3 4 5
Weights -0.100056 0.028847 0.229485 0.6999 -0.154419
1 2 3 4
Weights 0.06832 -0.209011 0.25416 0.776763
1 2 3 4 5
Weights -0.001242 -0.168467 0.224498 0.784924 -0.281577
1 2 3 4
Weights -0.113635 -0.131823 0.146742 0.998626
1 2 3 4 5
Weights -0.13144 -0.10672 0.133096 0.984551 -0.166427
1 2 3 4
Weights -0.228127 -0.106717 0.231812 1.012897
1 2 3 4 5
Weights -0.246088 -0.074707 0.230484 0.990064 -0.113972
1 2 3 4
Weights -0.035312 -0.445716 0.497934 0.925608
1 2 3 4 5
Weights -0.078912 -0.332383 0.427828 0.899998 -0.390933
1 2 3 4
Weights -0.142696 0.205387 0.273841 0.485771
1 2 3 4 5
Weights -0.242771 0.252635 0.290015 0.461954 -0.180908
1 2 3 4
Weights -0.05705 -0.229491 0.407891 0.805639
1 2 3 4 5
Weights -0.126122 -0.137176 0.379529 0.778161 -0.31423
1 2 3 4
Weights -0.19431 -0.187309 0.349044 0.965446
1 2 3 4 5
Weights -0.217334 -0.138652 0.337178 0.941781 -0.13314
1 2 3 4
Weights -0.118332 -0.061904 0.073043 1.051207
1 2 3 4 5
Weights -0.125075 -0.0465 0.066545 1.035033 -0.192757
1 2 3 4
Weights 0.02993 -0.395992 0.469445 0.844986
1 2 3 4 5
Weights -0.036109 -0.290595 0.41714 0.821511 -0.488661
1 2 3 4
Weights -0.106968 -0.078205 0.389156 0.667093
1 2 3 4 5
Weights -0.247944 0.032538 0.389033 0.621338 -0.359554
1 2 3 4
Weights -0.078667 0.038441 0.139088 0.796303
1 2 3 4 5
Weights -0.140283 0.068241 0.150753 0.777097 -0.219256
1 2 3 4
Weights 0.056119 0.025977 0.18918 0.671473
1 2 3 4 5
Weights 0.004412 0.052455 0.197276 0.673098 -0.146068
1 2 3 4
Weights -0.046847 -0.174123 0.269078 0.860805
1 2 3 4 5
Weights -0.092026 -0.124083 0.248009 0.846103 -0.240655
1 2 3 4
Weights 0.052456 -0.173319 0.358314 0.687485
1 2 3 4 5
Weights -0.058143 -0.085338 0.341347 0.678487 -0.381237
1 2 3 4
Weights -0.157725 0.068374 0.119818 0.880075
1 2 3 4 5
Weights -0.243927 0.121957 0.162057 0.821824 -0.385887
1 2 3 4
Weights -0.015844 0.003139 0.041231 0.858716
1 2 3 4 5
Weights -0.067013 0.012168 0.045926 0.855116 -0.247175
1 2 3 4
Weights -0.011016 -0.191651 0.379528 0.746708
1 2 3 4 5
Weights -0.072096 -0.120639 0.361973 0.727671 -0.247291
1 2 3 4
Weights -0.078963 -0.334799 0.335658 0.972491
1 2 3 4 5
Weights -0.112807 -0.269078 0.298553 0.950912 -0.235257
1 2 3 4
Weights -0.134622 -0.24039 0.262206 1.015514
1 2 3 4 5
Weights -0.148952 -0.209598 0.248103 1.002227 -0.103727
1 2 3 4
Weights -0.001953 -0.096623 0.319345 0.715625
1 2 3 4 5
Weights -0.048901 -0.055654 0.312304 0.709924 -0.160983
1 2 3 4
Weights -0.157859 -0.320383 0.430045 0.96115
1 2 3 4 5
Weights -0.188808 -0.255025 0.407588 0.936099 -0.15772
Predicting t+1...
Fitting t+2...
1 2 3 4
Weights -0.255585 -0.01136 0.313564 0.818924
1 2 3 4 5
Weights -0.254636 -0.011615 0.313347 0.817705 0.000061
1 2 3 4
Weights -0.055682 0.053627 0.145081 0.731377
1 2 3 4 5
Weights -0.113626 0.057161 0.146338 0.729677 -0.068195
1 2 3 4
Weights -0.086472 -0.290653 0.199221 1.072153
1 2 3 4 5
Weights -0.086988 -0.290351 0.199644 1.071173 5.439773e-08
1 2 3 4
Weights -0.074532 -0.256497 0.560001 0.652402
1 2 3 4 5
Weights -0.075875 -0.256593 0.55937 0.650343 -0.000021
1 2 3 4
Weights 0.04503 -0.041891 0.158835 0.788967
1 2 3 4 5
Weights 0.06034 -0.042733 0.158567 0.787837 0.019613
1 2 3 4
Weights -0.071104 -0.204754 0.120356 1.038711
1 2 3 4 5
Weights -0.07106 -0.204757 0.120348 1.038701 4.872624e-07
1 2 3 4
Weights -0.08688 -0.142759 0.415225 0.736634
1 2 3 4 5
Weights -0.087702 -0.142851 0.415319 0.736504 -0.000037
1 2 3 4
Weights -0.185678 -0.066327 0.101592 1.049717
1 2 3 4 5
Weights -0.185767 -0.065845 0.101741 1.047308 0.000011
1 2 3 4
Weights -0.129815 -0.097025 0.078189 1.056087
1 2 3 4 5
Weights -0.130318 -0.096898 0.07831 1.05594 -0.000008
1 2 3 4
Weights -0.021637 -0.243946 -0.082888 1.25544
1 2 3 4 5
Weights -0.021741 -0.243899 -0.08285 1.255366 -1.866556e-07
1 2 3 4
Weights -0.158904 -0.222866 0.646848 0.591004
1 2 3 4 5
Weights -0.159622 -0.222899 0.646866 0.590455 -0.000005
1 2 3 4
Weights -0.041966 -0.098744 0.246228 0.797511
1 2 3 4 5
Weights -0.033594 -0.098014 0.244688 0.794028 0.00331
1 2 3 4
Weights 0.302188 -0.254555 0.42137 0.454084
1 2 3 4 5
Weights 0.267315 -0.254466 0.425735 0.448651 -0.047955
1 2 3 4
Weights -0.033364 -0.239709 0.502055 0.69789
1 2 3 4 5
Weights -0.032153 -0.239412 0.501508 0.697338 0.000014
1 2 3 4
Weights 0.015932 -0.166423 0.240362 0.848702
1 2 3 4 5
Weights 0.019775 -0.165988 0.239852 0.847845 0.00053
1 2 3 4
Weights -0.061307 -0.197824 0.167681 0.994309
1 2 3 4 5
Weights -0.060619 -0.197906 0.167546 0.994057 0.000004
1 2 3 4
Weights -0.080071 -0.155193 0.237343 0.907938
1 2 3 4 5
Weights -0.081853 -0.154958 0.23765 0.906876 -0.000042
1 2 3 4
Weights -0.054032 0.094842 0.238382 0.609204
1 2 3 4 5
Weights -0.082592 0.097988 0.238759 0.608189 -0.036601
1 2 3 4
Weights -0.136526 -0.028739 0.178496 0.911827
1 2 3 4 5
Weights -0.126732 -0.025282 0.17619 0.883443 0.000812
1 2 3 4
Weights 0.036465 -0.410082 0.254123 1.017688
1 2 3 4 5
Weights 0.032819 -0.405976 0.255785 1.011497 -2.227418e-07
1 2 3 4
Weights -0.138808 -0.140558 0.222161 0.9519
1 2 3 4 5
Weights -0.138788 -0.140533 0.222153 0.951889 3.151968e-07
1 2 3 4
Weights -0.101423 -0.189763 0.264213 0.914141
1 2 3 4 5
Weights -0.10134 -0.189758 0.264191 0.91412 6.829049e-07
1 2 3 4
Weights -0.069466 -0.255923 0.13 1.076936
1 2 3 4 5
Weights -0.070986 -0.254728 0.130418 1.075421 -0.000001
1 2 3 4
Weights -0.185295 -0.016679 -0.074843 1.191167
1 2 3 4 5
Weights -0.186571 -0.016243 -0.074403 1.190888 -0.000011
1 2 3 4
Weights 0.009591 -0.300259 0.464256 0.756829
1 2 3 4 5
Weights 0.007934 -0.300036 0.464586 0.755036 -0.000013
1 2 3 4
Weights -0.004418 -0.162062 0.325373 0.773807
1 2 3 4 5
Weights 0.008816 -0.158626 0.320039 0.759477 0.001086
1 2 3 4
Weights -0.253484 0.068496 0.305645 0.809106
1 2 3 4 5
Weights -0.247916 0.069274 0.303829 0.803884 0.000412
1 2 3 4
Weights 0.051855 -0.234672 0.396711 0.712681
1 2 3 4 5
Weights 0.051134 -0.2348 0.396787 0.71251 -0.000122
1 2 3 4
Weights -0.06007 -0.049043 0.263158 0.724431
1 2 3 4 5
Weights -0.052174 -0.049002 0.262629 0.723891 0.006251
1 2 3 4
Weights 0.121114 -0.398591 0.344617 0.847636
1 2 3 4 5
Weights 0.123158 -0.398028 0.343563 0.845513 0.000011
1 2 3 4
Weights -0.085934 -0.380161 0.471132 0.929467
1 2 3 4 5
Weights -0.087829 -0.377198 0.471413 0.924759 -8.838299e-09
1 2 3 4
Weights -0.022663 -0.364238 0.241989 1.040159
1 2 3 4 5
Weights -0.027619 -0.361023 0.244441 1.032558 -1.604474e-07
1 2 3 4
Weights -0.100184 -0.107912 0.18664 0.925849
1 2 3 4 5
Weights -0.097903 -0.107553 0.18611 0.923297 0.000125
1 2 3 4
Weights -0.05212 -0.216472 0.344732 0.843072
1 2 3 4 5
Weights -0.051285 -0.216251 0.343799 0.839602 -0.000033
1 2 3 4
Weights 0.170159 0.113226 -0.024498 0.634672
1 2 3 4 5
Weights 0.053165 0.127732 -0.018699 0.605235 -0.218202
1 2 3 4
Weights -0.030164 -0.123085 0.301298 0.791196
1 2 3 4 5
Weights -0.031596 -0.122572 0.301041 0.789993 -0.000667
1 2 3 4
Weights -0.051505 0.010284 0.142652 0.797204
1 2 3 4 5
Weights -0.075614 0.011114 0.14328 0.797217 -0.024604
1 2 3 4
Weights -0.095866 -0.111411 0.232374 0.843406
1 2 3 4 5
Weights -0.096745 -0.110816 0.232486 0.84244 -0.000597
1 2 3 4
Weights 0.039805 -0.386263 0.324459 0.959719
1 2 3 4 5
Weights 0.039231 -0.383401 0.324351 0.954838 -2.903160e-07
1 2 3 4
Weights -0.035947 -0.371006 0.345183 0.990275
1 2 3 4 5
Weights -0.037671 -0.368976 0.345991 0.984694 -4.168638e-08
1 2 3 4
Weights -0.000588 0.001096 0.316104 0.615533
1 2 3 4 5
Weights 0.016002 -0.000087 0.315776 0.615066 0.019497
1 2 3 4
Weights 0.117621 0.113051 0.004942 0.647582
1 2 3 4 5
Weights 0.043649 0.121971 0.014647 0.653718 -0.100625
1 2 3 4
Weights -0.139885 0.094827 0.219763 0.740501
1 2 3 4 5
Weights -0.147886 0.095266 0.219995 0.740614 -0.008013
1 2 3 4
Weights -0.150142 -0.012396 0.204563 0.840102
1 2 3 4 5
Weights -0.145166 -0.012059 0.203713 0.83694 0.002396
1 2 3 4
Weights -0.131747 -0.110322 0.28954 0.867265
1 2 3 4 5
Weights -0.131599 -0.110305 0.289497 0.867236 0.000002
1 2 3 4
Weights 0.055366 -0.559806 0.343571 1.066679
1 2 3 4 5
Weights 0.047936 -0.552914 0.348289 1.054923 -1.344317e-09
1 2 3 4
Weights -0.04913 -0.06709 0.20988 0.805025
1 2 3 4 5
Weights -0.048196 -0.067055 0.209819 0.804991 0.000536
1 2 3 4
Weights -0.064604 -0.19958 0.154244 1.043617
1 2 3 4 5
Weights -0.065615 -0.198244 0.154611 1.040138 -0.000003
1 2 3 4
Weights -0.172486 -0.207997 0.384411 0.899286
1 2 3 4 5
Weights -0.172991 -0.207963 0.384723 0.898429 -1.728328e-07
1 2 3 4
Weights 0.280663 0.076554 0.075619 0.503552
1 2 3 4 5
Weights 0.185199 0.106234 0.085916 0.481761 -0.214392
1 2 3 4
Weights -0.023608 -0.217333 0.123052 1.039497
1 2 3 4 5
Weights -0.024371 -0.217184 0.123249 1.039291 -0.000004
1 2 3 4
Weights -0.060199 -0.109318 0.389158 0.671495
1 2 3 4 5
Weights -0.06116 -0.109416 0.38925 0.671459 -0.00041
1 2 3 4
Weights -0.103878 -0.049245 0.226935 0.853896
1 2 3 4 5
Weights -0.09344 -0.046852 0.223029 0.831921 0.001243
1 2 3 4
Weights 0.064955 -0.32729 0.273072 0.924667
1 2 3 4 5
Weights 0.06482 -0.324724 0.273439 0.922103 -0.000007
1 2 3 4
Weights -0.217135 0.108121 0.38163 0.611587
1 2 3 4 5
Weights -0.205546 0.108438 0.380653 0.610152 0.009574
1 2 3 4
Weights 0.046668 0.254702 -0.008694 0.567613
1 2 3 4 5
Weights -0.061191 0.272465 -0.002057 0.548291 -0.169499
1 2 3 4
Weights -0.144928 -0.148965 0.15578 1.064264
1 2 3 4 5
Weights -0.14503 -0.14895 0.155849 1.064223 -8.624463e-08
1 2 3 4
Weights 0.102255 -0.094667 0.168106 0.722174
1 2 3 4 5
Weights 0.089871 -0.094563 0.16823 0.722266 -0.01384
1 2 3 4
Weights 0.007507 -0.309365 0.068561 1.122165
1 2 3 4 5
Weights 0.007479 -0.309363 0.068584 1.122088 3.492112e-07
1 2 3 4
Weights -0.056488 -0.093323 0.301315 0.759553
1 2 3 4 5
Weights -0.055406 -0.093216 0.301194 0.75948 0.000444
1 2 3 4
Weights -0.088228 -0.108627 0.095778 1.018565
1 2 3 4 5
Weights -0.085658 -0.10831 0.095455 1.013742 0.000093
1 2 3 4
Weights 0.118567 0.115323 0.059806 0.608699
1 2 3 4 5
Weights 0.001853 0.137998 0.065865 0.580176 -0.203331
1 2 3 4
Weights 0.202978 -0.116824 0.268232 0.583766
1 2 3 4 5
Weights 0.122604 -0.106718 0.266268 0.570494 -0.132863
1 2 3 4
Weights -0.207456 0.030362 0.349792 0.726016
1 2 3 4 5
Weights -0.199598 0.03219 0.34699 0.716147 0.001768
1 2 3 4
Weights 0.107526 0.034438 -0.100424 0.865525
1 2 3 4 5
Weights 0.067897 0.034951 -0.097874 0.865645 -0.058683
1 2 3 4
Weights -0.06008 -0.23499 0.064896 1.127952
1 2 3 4 5
Weights -0.062067 -0.233898 0.065843 1.126185 -0.000001
1 2 3 4
Weights -0.214086 -0.064483 0.353301 0.786902
1 2 3 4 5
Weights -0.21361 -0.064391 0.353125 0.785558 0.000062
1 2 3 4
Weights -0.11463 -0.124054 -0.082009 1.240807
1 2 3 4 5
Weights -0.117173 -0.122686 -0.080303 1.237977 -0.000003
1 2 3 4
Weights 0.051376 0.014064 0.042212 0.812644
1 2 3 4 5
Weights -0.010323 0.016558 0.042516 0.806598 -0.090978
1 2 3 4
Weights -0.014391 -0.274008 0.012119 1.158152
1 2 3 4 5
Weights -0.015299 -0.273621 0.012726 1.157169 -0.000002
1 2 3 4
Weights -0.148375 0.06181 0.358565 0.615264
1 2 3 4 5
Weights -0.139379 0.061877 0.358061 0.614638 0.007896
1 2 3 4
Weights -0.194388 -0.182617 0.506295 0.773552
1 2 3 4 5
Weights -0.19471 -0.182612 0.506417 0.773505 -2.268206e-07
1 2 3 4
Weights -0.099093 -0.024259 -0.100024 1.13517
1 2 3 4 5
Weights -0.103085 -0.023504 -0.099509 1.134698 -0.001126
1 2 3 4
Weights -0.140298 -0.083588 0.348142 0.721826
1 2 3 4 5
Weights -0.1393 -0.083485 0.347675 0.719995 0.000986
1 2 3 4
Weights 0.000159 0.000057 0.321995 0.602528
1 2 3 4 5
Weights 0.054788 -0.004346 0.319551 0.598577 0.06465
1 2 3 4
Weights 0.313583 0.032354 0.036196 0.393361
1 2 3 4 5
Weights 0.129086 0.049973 0.040998 0.385598 -0.254496
1 2 3 4
Weights -0.033068 -0.340202 0.600944 0.706937
1 2 3 4 5
Weights -0.034212 -0.339159 0.600562 0.703731 -0.000001
1 2 3 4
Weights -0.231421 -0.092669 0.371942 0.87992
1 2 3 4 5
Weights -0.234148 -0.089728 0.372387 0.8683 8.731991e-07
1 2 3 4
Weights -0.170143 -0.145339 0.233367 0.976749
1 2 3 4 5
Weights -0.170227 -0.14495 0.233397 0.975609 0.000001
1 2 3 4
Weights -0.010278 -0.010987 0.273435 0.612782
1 2 3 4 5
Weights -0.035547 -0.010337 0.273772 0.612895 -0.026481
1 2 3 4
Weights -0.156512 -0.09238 0.328982 0.820704
1 2 3 4 5
Weights -0.155816 -0.092271 0.328757 0.820362 0.000022
1 2 3 4
Weights 0.004969 -0.458166 0.406672 0.938217
1 2 3 4 5
Weights 0.001399 -0.455959 0.407874 0.932757 -4.640316e-08
1 2 3 4
Weights -0.08685 -0.041725 -0.073327 1.100163
1 2 3 4 5
Weights -0.087505 -0.041646 -0.073257 1.100162 -0.000296
1 2 3 4
Weights 0.082665 -0.067993 -0.019753 0.882192
1 2 3 4 5
Weights -0.096108 -0.066804 -0.017451 0.864568 -0.204843
1 2 3 4
Weights -0.173816 -0.125638 0.145103 1.053924
1 2 3 4 5
Weights -0.173965 -0.12503 0.14537 1.052304 0.000001
1 2 3 4
Weights -0.034862 -0.347491 0.267008 0.990461
1 2 3 4 5
Weights -0.037153 -0.346382 0.267973 0.98779 -4.103851e-07
1 2 3 4
Weights -0.202948 -0.05138 0.256593 0.906465
1 2 3 4 5
Weights -0.202492 -0.049914 0.255917 0.900322 0.000021
1 2 3 4
Weights -0.111722 -0.078554 0.276436 0.806268
1 2 3 4 5
Weights -0.112412 -0.078643 0.276524 0.806282 -0.000121
1 2 3 4
Weights -0.127425 -0.2547 0.324727 0.949023
1 2 3 4 5
Weights -0.129009 -0.254059 0.325513 0.947331 -2.526468e-07
1 2 3 4
Weights -0.036441 -0.147237 0.376229 0.700271
1 2 3 4 5
Weights -0.036587 -0.147269 0.376246 0.700272 -0.000037
1 2 3 4
Weights 0.001857 -0.369751 0.133213 1.143233
1 2 3 4 5
Weights 0.000193 -0.368773 0.134342 1.141123 -3.795825e-08
1 2 3 4
Weights 0.010257 -0.499282 0.366416 1.01717
1 2 3 4 5
Weights 0.003559 -0.494542 0.36954 1.007735 -6.155838e-09
1 2 3 4
Weights -0.174195 -0.109095 0.2964 0.859678
1 2 3 4 5
Weights -0.173972 -0.108614 0.296038 0.858331 0.00002
1 2 3 4
Weights -0.05009 -0.178747 0.314264 0.865257
1 2 3 4 5
Weights -0.042451 -0.173941 0.311391 0.852782 -0.00007
1 2 3 4
Weights -0.068862 -0.284481 0.388201 0.842867
1 2 3 4 5
Weights -0.070432 -0.284034 0.388393 0.841145 -0.000002
1 2 3 4
Weights -0.047376 -0.222041 0.304954 0.869161
1 2 3 4 5
Weights -0.049243 -0.221697 0.305486 0.866921 -0.000018
1 2 3 4
Weights -0.087775 -0.256047 0.165117 1.068152
1 2 3 4 5
Weights -0.091904 -0.252759 0.167615 1.060904 -6.728150e-07
1 2 3 4
Weights -0.146824 -0.275863 0.269877 1.048894
1 2 3 4 5
Weights -0.146954 -0.275787 0.269913 1.048859 -3.440443e-09
1 2 3 4
Weights -0.060379 -0.281576 0.46896 0.789163
1 2 3 4 5
Weights -0.061878 -0.281002 0.469402 0.787774 -0.000002
1 2 3 4
Weights -0.027073 -0.477138 0.552234 0.857717
1 2 3 4 5
Weights -0.030119 -0.474421 0.553313 0.852234 -1.305727e-08
1 2 3 4
Weights -0.031205 -0.31744 0.479283 0.786598
1 2 3 4 5
Weights -0.034371 -0.315036 0.480044 0.780959 -0.000003
1 2 3 4
Weights 0.019677 -0.192076 0.465378 0.644047
1 2 3 4 5
Weights 0.020374 -0.191221 0.464084 0.641815 -0.000881
1 2 3 4
Weights -0.175035 -0.047698 0.165238 0.949378
1 2 3 4 5
Weights -0.175374 -0.047689 0.165282 0.949428 -0.000015
1 2 3 4
Weights -0.013304 -0.260061 0.275381 0.953007
1 2 3 4 5
Weights -0.009068 -0.251577 0.278733 0.937993 -0.000005
1 2 3 4
Weights -0.060145 -0.017365 0.121062 0.834593
1 2 3 4 5
Weights -0.056695 -0.017558 0.120864 0.834423 0.003517
1 2 3 4
Weights -0.11589 -0.02772 0.070577 0.980401
1 2 3 4 5
Weights -0.117917 -0.027529 0.070763 0.980251 -0.000542
1 2 3 4
Weights -0.137855 0.019047 0.257351 0.773571
1 2 3 4 5
Weights -0.119971 0.019933 0.254928 0.767227 0.011197
1 2 3 4
Weights -0.14242 -0.224934 0.231638 1.035725
1 2 3 4 5
Weights -0.144194 -0.223781 0.232585 1.033234 1.182191e-07
1 2 3 4
Weights -0.096865 -0.089163 0.420466 0.653252
1 2 3 4 5
Weights -0.100718 -0.089618 0.420739 0.652451 -0.001165
1 2 3 4
Weights -0.216429 -0.066451 0.32581 0.825182
1 2 3 4 5
Weights -0.2166 -0.066525 0.325894 0.825172 -0.00001
1 2 3 4
Weights -0.127089 0.076313 0.103513 0.835885
1 2 3 4 5
Weights -0.176688 0.078979 0.105136 0.834645 -0.050073
1 2 3 4
Weights -0.011454 -0.396302 0.542473 0.810253
1 2 3 4 5
Weights -0.01115 -0.395145 0.541709 0.807161 -1.441597e-07
1 2 3 4
Weights -0.02051 -0.120254 0.443025 0.604001
1 2 3 4 5
Weights -0.017664 -0.119833 0.442685 0.603771 0.001433
1 2 3 4
Weights -0.0634 -0.119702 0.168568 0.880941
1 2 3 4 5
Weights -0.063442 -0.119702 0.168574 0.880942 -0.000014
1 2 3 4
Weights -0.207299 0.008564 0.188248 0.930583
1 2 3 4 5
Weights -0.202285 0.009251 0.187083 0.924357 0.000127
1 2 3 4
Weights -0.08164 -0.116078 0.343197 0.786344
1 2 3 4 5
Weights -0.082209 -0.115534 0.34257 0.782759 -0.00044
1 2 3 4
Weights -0.075242 -0.131763 0.221482 0.890717
1 2 3 4 5
Weights -0.072735 -0.130983 0.220493 0.885859 0.000202
1 2 3 4
Weights -0.068986 -0.028425 0.320641 0.627344
1 2 3 4 5
Weights -0.099821 -0.028652 0.321506 0.627091 -0.027132
1 2 3 4
Weights -0.060834 -0.215924 0.224918 0.916893
1 2 3 4 5
Weights -0.060683 -0.21593 0.224877 0.916876 0.000005
1 2 3 4
Weights 0.091637 -0.310991 0.271896 0.852402
1 2 3 4 5
Weights 0.093369 -0.310726 0.271435 0.851931 0.000078
1 2 3 4
Weights -0.210401 0.005046 0.215832 0.866152
1 2 3 4 5
Weights -0.209975 0.005008 0.215778 0.86607 0.000083
1 2 3 4
Weights -0.017376 -0.19665 0.259883 0.842372
1 2 3 4 5
Weights -0.016168 -0.196456 0.259586 0.841969 0.000135
1 2 3 4
Weights -0.054596 -0.365346 0.436941 0.896725
1 2 3 4 5
Weights -0.056599 -0.363032 0.437356 0.89183 -8.895358e-08
1 2 3 4
Weights 0.080526 0.15308 0.030096 0.690539
1 2 3 4 5
Weights 0.023073 0.188776 0.04181 0.611355 -0.232539
1 2 3 4
Weights -0.114153 -0.132032 0.283141 0.876998
1 2 3 4 5
Weights -0.11394 -0.132043 0.283077 0.87698 0.000003
1 2 3 4
Weights -0.054835 -0.059695 0.227531 0.839687
1 2 3 4 5
Weights -0.054086 -0.058108 0.227224 0.838115 -0.003937
1 2 3 4
Weights -0.07124 -0.243637 0.520031 0.724902
1 2 3 4 5
Weights -0.07116 -0.242715 0.51862 0.720378 -0.000011
1 2 3 4
Weights -0.054432 -0.368236 0.621986 0.736435
1 2 3 4 5
Weights -0.056858 -0.364736 0.62285 0.73095 -2.512232e-07
1 2 3 4
Weights -0.018126 -0.327098 0.336516 0.909628
1 2 3 4 5
Weights -0.019557 -0.326595 0.336869 0.907825 -0.000001
1 2 3 4
Weights 0.193591 0.182285 0.049013 0.493012
1 2 3 4 5
Weights 0.092921 0.210513 0.061979 0.476493 -0.190929
1 2 3 4
Weights -0.144758 -0.147211 0.23333 0.955593
1 2 3 4 5
Weights -0.145383 -0.146552 0.233574 0.952878 0.000005
1 2 3 4
Weights -0.067341 -0.261541 0.271173 0.983236
1 2 3 4 5
Weights -0.066428 -0.260839 0.270789 0.980559 3.720091e-07
1 2 3 4
Weights -0.16143 0.020023 0.100289 0.952162
1 2 3 4 5
Weights -0.158256 0.019888 0.099992 0.951041 0.00074
1 2 3 4
Weights 0.097577 -0.406603 0.420521 0.816355
1 2 3 4 5
Weights 0.098313 -0.404223 0.418996 0.810248 -0.000006
1 2 3 4
Weights -0.129531 -0.047099 0.209134 0.929257
1 2 3 4 5
Weights -0.121877 -0.046156 0.207078 0.922596 0.000085
1 2 3 4
Weights -0.103135 -0.349689 0.353397 0.982907
1 2 3 4 5
Weights -0.106345 -0.347046 0.354717 0.978773 -2.944708e-08
1 2 3 4
Weights -0.024491 -0.172015 0.233097 0.917904
1 2 3 4 5
Weights -0.014513 -0.168755 0.230488 0.906772 -0.000149
1 2 3 4
Weights 0.024492 -0.06428 0.278425 0.669044
1 2 3 4 5
Weights 0.073336 -0.065847 0.275927 0.664922 0.051224
1 2 3 4
Weights 0.015033 -0.168414 0.214371 0.868737
1 2 3 4 5
Weights 0.014076 -0.168417 0.21444 0.868569 -0.0003
1 2 3 4
Weights -0.118449 -0.236705 0.163722 1.085047
1 2 3 4 5
Weights -0.119062 -0.236411 0.164097 1.084449 -9.835116e-08
1 2 3 4
Weights -0.13618 -0.043511 0.238105 0.868153
1 2 3 4 5
Weights -0.127001 -0.042112 0.235219 0.856053 0.000399
1 2 3 4
Weights -0.123087 -0.172625 0.298519 0.880914
1 2 3 4 5
Weights -0.122884 -0.172631 0.298444 0.880793 0.000003
1 2 3 4
Weights -0.135197 -0.166029 0.241105 0.954836
1 2 3 4 5
Weights -0.137272 -0.165215 0.242212 0.951271 -0.000006
1 2 3 4
Weights 0.005223 -0.168892 0.367337 0.683269
1 2 3 4 5
Weights 0.000058 -0.169316 0.367643 0.682223 -0.0028
1 2 3 4
Weights -0.093757 -0.061781 0.219476 0.841735
1 2 3 4 5
Weights -0.085631 -0.060392 0.217367 0.831868 0.001932
1 2 3 4
Weights -0.0473 -0.257459 0.333606 0.907321
1 2 3 4 5
Weights -0.047954 -0.256583 0.333464 0.90332 -0.000003
1 2 3 4
Weights -0.18871 0.051217 -0.058647 1.107192
1 2 3 4 5
Weights -0.185184 0.050723 -0.05874 1.105102 0.000718
1 2 3 4
Weights -0.012666 -0.372996 0.494787 0.807934
1 2 3 4 5
Weights -0.015269 -0.371095 0.49512 0.802381 -6.655557e-07
1 2 3 4
Weights -0.198087 -0.100607 0.285524 0.922427
1 2 3 4 5
Weights -0.200417 -0.098307 0.286276 0.91395 0.000003
1 2 3 4
Weights -0.037168 -0.240259 0.19016 0.998595
1 2 3 4 5
Weights -0.037881 -0.240153 0.190378 0.998136 -0.000002
1 2 3 4
Weights 0.10371 -0.262809 0.371145 0.678856
1 2 3 4 5
Weights 0.106186 -0.262346 0.370833 0.678863 0.000945
1 2 3 4
Weights -0.15825 -0.076183 0.298341 0.855888
1 2 3 4 5
Weights -0.157286 -0.072759 0.295572 0.833764 0.000058
1 2 3 4
Weights -0.17039 -0.017288 0.305664 0.785368
1 2 3 4 5
Weights -0.166331 -0.016153 0.303784 0.776806 0.00061
1 2 3 4
Weights -0.032295 -0.09653 0.340541 0.677644
1 2 3 4 5
Weights -0.023415 -0.096226 0.339629 0.676349 0.006852
1 2 3 4
Weights -0.012808 -0.052476 0.28308 0.717141
1 2 3 4 5
Weights 0.034333 -0.050166 0.279645 0.709782 0.036295
1 2 3 4
Weights -0.128861 -0.215211 0.118351 1.113698
1 2 3 4 5
Weights -0.130008 -0.214613 0.118968 1.11266 -2.112456e-07
1 2 3 4
Weights -0.098565 -0.06401 0.12425 0.919392
1 2 3 4 5
Weights -0.100304 -0.063945 0.124411 0.919289 -0.000634
1 2 3 4
Weights -0.14249 0.102455 0.066075 0.912533
1 2 3 4 5
Weights -0.155477 0.10309 0.066418 0.912309 -0.012593
1 2 3 4
Weights 0.065655 -0.285342 0.304309 0.865765
1 2 3 4 5
Weights 0.06933 -0.282834 0.302963 0.859881 -0.000073
1 2 3 4
Weights -0.073317 -0.280684 0.32631 0.940181
1 2 3 4 5
Weights -0.075607 -0.277747 0.328143 0.930781 -7.810685e-07
1 2 3 4
Weights -0.142418 0.038219 0.085639 0.943043
1 2 3 4 5
Weights -0.133635 0.037961 0.085024 0.940249 0.004023
1 2 3 4
Weights -0.207308 -0.041345 0.335196 0.814874
1 2 3 4 5
Weights -0.206774 -0.041228 0.33501 0.814488 0.00002
1 2 3 4
Weights -0.028374 -0.226345 0.491865 0.692961
1 2 3 4 5
Weights -0.023144 -0.223862 0.488214 0.685886 -0.000139
1 2 3 4
Weights -0.080706 -0.158375 0.200527 0.960518
1 2 3 4 5
Weights -0.078814 -0.156799 0.201713 0.954561 -0.00004
1 2 3 4
Weights -0.016394 -0.124353 0.319456 0.769229
1 2 3 4 5
Weights -0.009703 -0.123483 0.318172 0.766946 0.001121
1 2 3 4
Weights -0.133893 0.020416 0.360506 0.653255
1 2 3 4 5
Weights -0.120145 0.021444 0.359057 0.650586 0.008696
1 2 3 4
Weights -0.140065 -0.140432 0.265255 0.950562
1 2 3 4 5
Weights -0.13522 -0.139731 0.264373 0.943707 -0.000005
1 2 3 4
Weights -0.104139 -0.107252 0.229987 0.880197
1 2 3 4 5
Weights -0.105355 -0.106632 0.230135 0.877615 -0.000223
1 2 3 4
Weights -0.195781 -0.069816 0.216669 0.94942
1 2 3 4 5
Weights -0.195401 -0.069811 0.216578 0.949149 0.000004
1 2 3 4
Weights -0.214544 -0.128757 0.256784 0.997251
1 2 3 4 5
Weights -0.21518 -0.128576 0.257128 0.996751 -1.912605e-07
1 2 3 4
Weights -0.16037 -0.11492 0.153685 1.023577
1 2 3 4 5
Weights -0.160326 -0.114925 0.153713 1.023541 3.775055e-07
1 2 3 4
Weights -0.221777 -0.163819 0.326647 0.960096
1 2 3 4 5
Weights -0.223177 -0.162745 0.327266 0.956916 1.049682e-07
1 2 3 4
Weights -0.089778 -0.135533 0.201065 0.905062
1 2 3 4 5
Weights -0.089085 -0.13549 0.200933 0.904677 0.000071
1 2 3 4
Weights -0.125584 -0.181293 0.013866 1.213356
1 2 3 4 5
Weights -0.127197 -0.180397 0.014874 1.211839 -1.284274e-07
1 2 3 4
Weights -0.019365 -0.11979 0.245406 0.80118
1 2 3 4 5
Weights -0.022101 -0.119602 0.245588 0.800705 -0.001803
1 2 3 4
Weights -0.163535 -0.143294 0.282095 0.937828
1 2 3 4 5
Weights -0.164926 -0.142902 0.283041 0.934884 -0.000003
1 2 3 4
Weights -0.0407 -0.240993 0.326819 0.900722
1 2 3 4 5
Weights -0.041833 -0.240699 0.327058 0.899733 -0.000002
1 2 3 4
Weights -0.202487 -0.193891 0.381398 0.901241
1 2 3 4 5
Weights -0.202506 -0.193725 0.381367 0.900893 9.854646e-08
1 2 3 4
Weights -0.062713 0.278859 -0.538007 1.160583
1 2 3 4 5
Weights -0.091176 0.261665 -0.528643 1.15431 -0.069008
1 2 3 4
Weights -0.208089 -0.135435 0.45353 0.748619
1 2 3 4 5
Weights -0.209257 -0.135404 0.453972 0.747881 -0.000009
1 2 3 4
Weights 0.099728 -0.37421 0.63946 0.561694
1 2 3 4 5
Weights 0.101318 -0.373463 0.638647 0.561241 0.000031
1 2 3 4
Weights -0.147911 -0.223315 0.328781 0.924289
1 2 3 4 5
Weights -0.148468 -0.223044 0.328932 0.92381 -3.107928e-07
1 2 3 4
Weights -0.064857 -0.057518 0.321597 0.737255
1 2 3 4 5
Weights -0.064465 -0.057506 0.321569 0.737223 0.000283
1 2 3 4
Weights 0.16209 0.031063 -0.004958 0.53419
1 2 3 4 5
Weights 0.258893 0.027865 -0.007895 0.530832 0.115301
1 2 3 4
Weights -0.007037 -0.284018 0.384895 0.79564
1 2 3 4 5
Weights -0.007009 -0.284014 0.384888 0.79564 3.610072e-07
1 2 3 4
Weights -0.048177 -0.322875 0.104892 1.212352
1 2 3 4 5
Weights -0.049118 -0.32192 0.106047 1.209159 -7.363077e-09
1 2 3 4
Weights -0.107267 -0.119918 0.258366 0.878488
1 2 3 4 5
Weights -0.107283 -0.119921 0.258369 0.878491 -6.300095e-07
1 2 3 4
Weights -0.037997 -0.35202 0.466327 0.823829
1 2 3 4 5
Weights -0.040194 -0.35075 0.466152 0.818988 -7.866112e-07
1 2 3 4
Weights -0.030031 -0.004559 0.05903 0.874914
1 2 3 4 5
Weights -0.079564 -0.003152 0.060336 0.874287 -0.050229
1 2 3 4
Weights -0.228435 -0.063043 0.233921 0.967644
1 2 3 4 5
Weights -0.22857 -0.063015 0.233969 0.967653 -2.597408e-07
1 2 3 4
Weights -0.117999 -0.124718 0.246435 0.950663
1 2 3 4 5
Weights -0.118069 -0.124676 0.246438 0.950612 -9.365548e-07
1 2 3 4
Weights -0.093605 -0.006906 0.102649 0.876906
1 2 3 4 5
Weights -0.101215 -0.006399 0.103176 0.876973 -0.006772
1 2 3 4
Weights -0.129545 -0.023044 0.18386 0.844481
1 2 3 4 5
Weights -0.125599 -0.023172 0.18338 0.843398 0.003339
1 2 3 4
Weights -0.092807 -0.131776 0.282139 0.89545
1 2 3 4 5
Weights -0.087124 -0.131322 0.280401 0.889879 -0.000032
1 2 3 4
Weights -0.009498 -0.311705 0.412706 0.824386
1 2 3 4 5
Weights -0.012622 -0.308275 0.413787 0.818636 -0.000005
1 2 3 4
Weights -0.128412 -0.118041 0.323202 0.825309
1 2 3 4 5
Weights -0.126777 -0.117139 0.322148 0.820003 0.000065
1 2 3 4
Weights -0.161916 -0.163053 0.330386 0.87085
1 2 3 4 5
Weights -0.163844 -0.162454 0.331143 0.867394 -0.000009
1 2 3 4
Weights 0.010511 0.052463 0.053912 0.704616
1 2 3 4 5
Weights 0.029406 0.05152 0.053517 0.704268 0.025151
1 2 3 4
Weights -0.126994 -0.19504 0.219551 1.019476
1 2 3 4 5
Weights -0.127449 -0.194814 0.21972 1.019008 -2.496967e-07
1 2 3 4
Weights -0.115298 -0.20497 0.425823 0.816829
1 2 3 4 5
Weights -0.115955 -0.203436 0.426264 0.810656 -0.000004
1 2 3 4
Weights 0.002651 -0.348249 0.358977 0.939376
1 2 3 4 5
Weights 0.001991 -0.342622 0.359545 0.930725 -3.536078e-07
1 2 3 4
Weights -0.108273 -0.129958 0.033109 1.106449
1 2 3 4 5
Weights -0.10783 -0.130027 0.033071 1.106093 0.000005
1 2 3 4
Weights -0.044944 -0.26287 0.476066 0.758691
1 2 3 4 5
Weights -0.045668 -0.262229 0.475495 0.754998 -0.000011
1 2 3 4
Weights -0.013013 -0.317016 0.426181 0.842591
1 2 3 4 5
Weights -0.012078 -0.316855 0.425633 0.842049 7.291581e-07
1 2 3 4
Weights -0.159968 -0.172638 0.334498 0.881469
1 2 3 4 5
Weights -0.160209 -0.172606 0.334575 0.881311 -8.056286e-07
1 2 3 4
Weights -0.037341 0.013818 0.223824 0.635338
1 2 3 4 5
Weights -0.177791 0.017895 0.22457 0.631028 -0.148903
1 2 3 4
Weights 0.100711 -0.191716 0.329805 0.692442
1 2 3 4 5
Weights 0.12667 -0.187924 0.326782 0.689106 0.015979
1 2 3 4
Weights -0.208243 -0.148594 0.549474 0.67443
1 2 3 4 5
Weights -0.208918 -0.148639 0.550105 0.673788 -0.000005
1 2 3 4
Weights -0.141898 -0.110924 0.232945 0.927262
1 2 3 4 5
Weights -0.142419 -0.109924 0.233124 0.921883 0.000022
1 2 3 4
Weights 0.381675 0.062528 0.063654 0.421444
1 2 3 4 5
Weights 0.28807 0.092933 0.079022 0.40785 -0.219932
1 2 3 4
Weights 0.029428 -0.45095 0.512332 0.853102
1 2 3 4 5
Weights 0.029212 -0.449299 0.512024 0.849362 -6.726145e-08
1 2 3 4
Weights -0.032708 -0.262768 0.405787 0.806008
1 2 3 4 5
Weights -0.033176 -0.26281 0.40592 0.805944 -0.000002
1 2 3 4
Weights -0.083386 -0.227836 0.200279 1.03655
1 2 3 4 5
Weights -0.085911 -0.225948 0.201311 1.032386 -8.863364e-07
1 2 3 4
Weights 0.015592 -0.129297 0.162262 0.860778
1 2 3 4 5
Weights 0.0153 -0.1293 0.162288 0.860776 -0.000166
1 2 3 4
Weights 0.025628 -0.33195 0.415192 0.805635
1 2 3 4 5
Weights 0.0252 -0.33178 0.415224 0.80494 -0.000003
1 2 3 4
Weights 0.053593 0.12952 0.066834 0.606904
1 2 3 4 5
Weights -0.015461 0.136383 0.068805 0.602446 -0.093771
1 2 3 4
Weights -0.035522 -0.112468 0.319545 0.772909
1 2 3 4 5
Weights -0.032388 -0.110198 0.31709 0.768639 -0.002372
1 2 3 4
Weights -0.090998 -0.277814 0.439076 0.858399
1 2 3 4 5
Weights -0.091032 -0.277813 0.439089 0.858403 -9.879678e-09
1 2 3 4
Weights -0.178236 -0.155317 0.165772 1.076691
1 2 3 4 5
Weights -0.178938 -0.155028 0.166036 1.076313 -1.406621e-07
1 2 3 4
Weights -0.03619 -0.406361 0.481499 0.863265
1 2 3 4 5
Weights -0.036214 -0.40636 0.481507 0.863261 -2.029819e-09
1 2 3 4
Weights -0.133475 -0.06212 0.189467 0.9403
1 2 3 4 5
Weights -0.130915 -0.061487 0.188941 0.93526 -0.000211
1 2 3 4
Weights -0.172166 -0.05671 0.170552 0.962851
1 2 3 4 5
Weights -0.173472 -0.055453 0.171112 0.954132 0.000052
1 2 3 4
Weights -0.00556 -0.395592 0.505087 0.830839
1 2 3 4 5
Weights -0.005142 -0.392605 0.504347 0.826209 -1.672063e-07
1 2 3 4
Weights -0.000324 0.01582 0.15117 0.694887
1 2 3 4 5
Weights 0.100203 0.01045 0.147668 0.687268 0.118364
1 2 3 4
Weights 0.128607 -0.114295 0.245675 0.676194
1 2 3 4 5
Weights 0.083973 -0.113835 0.246086 0.675225 -0.053736
1 2 3 4
Weights -0.137674 -0.072332 0.140423 0.960372
1 2 3 4 5
Weights -0.1366 -0.072197 0.140109 0.958542 0.000143
1 2 3 4
Weights 0.024927 -0.328772 0.376807 0.868938
1 2 3 4 5
Weights 0.025376 -0.32734 0.376558 0.865253 -0.000003
1 2 3 4
Weights -0.185035 -0.036549 0.217049 0.916408
1 2 3 4 5
Weights -0.182261 -0.036314 0.216302 0.911578 0.000075
1 2 3 4
Weights -0.243279 -0.045304 0.310472 0.889702
1 2 3 4 5
Weights -0.24354 -0.045276 0.310552 0.889718 -9.022049e-07
1 2 3 4
Weights -0.022376 -0.132982 0.280192 0.818175
1 2 3 4 5
Weights -0.009841 -0.130839 0.276984 0.810569 0.000949
1 2 3 4
Weights -0.182001 -0.180653 0.320769 0.925469
1 2 3 4 5
Weights -0.18191 -0.180567 0.320662 0.925139 3.034999e-07
1 2 3 4
Weights -0.069672 -0.105255 0.324256 0.781141
1 2 3 4 5
Weights -0.06724 -0.104915 0.323814 0.780715 0.000223
1 2 3 4
Weights -0.016895 -0.23254 0.153256 0.958345
1 2 3 4 5
Weights -0.016654 -0.232532 0.153202 0.9583 0.000018
1 2 3 4
Weights 0.022481 -0.131577 0.21244 0.760523
1 2 3 4 5
Weights 0.015135 -0.131652 0.212883 0.760569 -0.00599
1 2 3 4
Weights -0.248839 -0.142512 0.462355 0.841014
1 2 3 4 5
Weights -0.248384 -0.142164 0.46212 0.840271 1.082554e-07
1 2 3 4
Weights 0.004716 -0.273489 0.594216 0.588842
1 2 3 4 5
Weights 0.00342 -0.273901 0.59427 0.588176 -0.00007
1 2 3 4
Weights -0.121799 0.023751 0.167091 0.856263
1 2 3 4 5
Weights -0.110823 0.024177 0.166227 0.853858 0.005082
1 2 3 4
Weights -0.095559 -0.27724 0.229249 1.051423
1 2 3 4 5
Weights -0.095622 -0.277098 0.229281 1.051088 2.730295e-08
1 2 3 4
Weights -0.124513 -0.065585 0.129546 0.959842
1 2 3 4 5
Weights -0.122336 -0.065621 0.129356 0.958693 0.0002
1 2 3 4
Weights -0.039269 -0.211149 0.208379 0.955195
1 2 3 4 5
Weights -0.038965 -0.21118 0.208295 0.955168 0.000002
1 2 3 4
Weights -0.15437 -0.128932 0.363735 0.810107
1 2 3 4 5
Weights -0.154678 -0.128447 0.363247 0.806817 0.000022
1 2 3 4
Weights -0.176825 0.003974 0.257393 0.82991
1 2 3 4 5
Weights -0.174372 0.007799 0.256395 0.806625 0.001038
1 2 3 4
Weights 0.245072 -0.111408 0.129108 0.683725
1 2 3 4 5
Weights 0.170721 -0.102293 0.128428 0.657088 -0.165438
1 2 3 4
Weights -0.053653 -0.156746 0.180478 0.906685
1 2 3 4 5
Weights -0.05272 -0.156728 0.180307 0.906445 0.000124
1 2 3 4
Weights -0.063273 -0.211343 0.440294 0.752009
1 2 3 4 5
Weights -0.061186 -0.210283 0.438869 0.748067 0.00002
1 2 3 4
Weights -0.040329 -0.014287 0.176309 0.811266
1 2 3 4 5
Weights 0.029708 -0.014806 0.172032 0.798049 0.061511
1 2 3 4
Weights -0.049439 -0.244092 0.255596 0.968896
1 2 3 4 5
Weights -0.049387 -0.24401 0.255513 0.968839 2.476056e-07
1 2 3 4
Weights -0.032799 -0.357149 0.425715 0.911441
1 2 3 4 5
Weights -0.03313 -0.356887 0.425797 0.910616 -4.736049e-08
1 2 3 4
Weights -0.195576 -0.099886 0.398822 0.794825
1 2 3 4 5
Weights -0.196446 -0.099627 0.398725 0.79421 -0.000009
1 2 3 4
Weights -0.037229 -0.371153 0.460452 0.836582
1 2 3 4 5
Weights -0.041919 -0.367756 0.461552 0.83017 -4.965813e-07
1 2 3 4
Weights -0.187042 -0.108681 0.179253 1.012636
1 2 3 4 5
Weights -0.187502 -0.107506 0.179036 1.010247 0.000002
1 2 3 4
Weights -0.068056 -0.07185 0.216421 0.842565
1 2 3 4 5
Weights -0.071081 -0.071842 0.216877 0.841811 -0.001374
1 2 3 4
Weights 0.04445 0.063898 0.055683 0.763813
1 2 3 4 5
Weights -0.00008 0.069642 0.058882 0.760269 -0.072687
1 2 3 4
Weights -0.180262 -0.021293 0.266451 0.829585
1 2 3 4 5
Weights -0.180755 -0.021351 0.266529 0.829506 -0.000087
1 2 3 4
Weights -0.224734 -0.079961 0.329663 0.877935
1 2 3 4 5
Weights -0.224196 -0.079986 0.329576 0.877637 0.000001
1 2 3 4
Weights -0.02929 -0.015031 0.279392 0.715913
1 2 3 4 5
Weights 0.086448 -0.013463 0.269811 0.688756 0.102377
1 2 3 4
Weights -0.026334 -0.363024 0.227977 1.061936
1 2 3 4 5
Weights -0.030569 -0.359871 0.230335 1.055604 -8.630916e-08
1 2 3 4
Weights 0.002243 -0.027064 0.152631 0.79091
1 2 3 4 5
Weights 0.002967 -0.027074 0.152613 0.790894 0.000734
1 2 3 4
Weights 0.057708 -0.333153 0.392261 0.839584
1 2 3 4 5
Weights 0.066748 -0.32778 0.387241 0.829831 -0.00001
1 2 3 4
Weights -0.049498 -0.313568 0.177703 1.071589
1 2 3 4 5
Weights -0.052601 -0.311745 0.179703 1.06675 -3.192149e-07
1 2 3 4
Weights -0.06031 -0.040706 0.315503 0.662942
1 2 3 4 5
Weights -0.05232 -0.040634 0.315056 0.662472 0.006838
1 2 3 4
Weights -0.07099 -0.313046 0.515653 0.799314
1 2 3 4 5
Weights -0.071439 -0.308351 0.515292 0.791663 -5.554768e-07
1 2 3 4
Weights -0.006761 0.031234 0.114174 0.710793
1 2 3 4 5
Weights -0.013681 0.031443 0.114247 0.710809 -0.007972
1 2 3 4
Weights -0.00267 -0.395135 0.335507 0.973531
1 2 3 4 5
Weights -0.006646 -0.390829 0.336667 0.965322 -1.201462e-07
1 2 3 4
Weights -0.145198 -0.114883 0.197498 0.956512
1 2 3 4 5
Weights -0.145047 -0.114861 0.197522 0.956223 0.000008
1 2 3 4
Weights -0.029364 -0.206871 0.378952 0.747648
1 2 3 4 5
Weights -0.028854 -0.206773 0.378856 0.747586 0.000032
1 2 3 4
Weights -0.030138 -0.321158 0.504516 0.778814
1 2 3 4 5
Weights -0.029774 -0.319001 0.503695 0.773846 -0.000002
1 2 3 4
Weights -0.131325 -0.153309 0.135789 1.04814
1 2 3 4 5
Weights -0.132435 -0.153085 0.136198 1.047561 -0.000003
1 2 3 4
Weights -0.17053 -0.024801 0.203526 0.921763
1 2 3 4 5
Weights -0.164216 -0.023051 0.202122 0.911153 0.000132
1 2 3 4
Weights -0.078537 -0.147354 0.07966 1.073456
1 2 3 4 5
Weights -0.080163 -0.146908 0.080174 1.07235 -0.000016
1 2 3 4
Weights -0.035369 -0.413796 0.355409 0.991736
1 2 3 4 5
Weights -0.039208 -0.410894 0.357425 0.986719 -1.993238e-08
1 2 3 4
Weights -0.072281 -0.208971 0.047599 1.136149
1 2 3 4 5
Weights -0.072827 -0.208737 0.047846 1.135678 -7.690663e-07
1 2 3 4
Weights -0.144512 0.028178 0.14538 0.871143
1 2 3 4 5
Weights -0.145868 0.028182 0.145474 0.871216 -0.000768
1 2 3 4
Weights -0.142803 -0.1101 0.340716 0.799687
1 2 3 4 5
Weights -0.142505 -0.110064 0.34064 0.799635 0.000014
1 2 3 4
Weights -0.124621 -0.258389 0.456455 0.807841
1 2 3 4 5
Weights -0.126316 -0.257631 0.456674 0.80554 -0.000001
1 2 3 4
Weights -0.124354 -0.224497 0.26651 0.990473
1 2 3 4 5
Weights -0.124427 -0.224464 0.266543 0.990426 -3.202499e-08
1 2 3 4
Weights -0.190709 -0.005547 0.131807 0.982056
1 2 3 4 5
Weights -0.191191 -0.006152 0.132223 0.981612 -0.000082
1 2 3 4
Weights -0.077748 0.023033 0.323605 0.705532
1 2 3 4 5
Weights -0.066549 0.024874 0.323162 0.704346 0.005317
1 2 3 4
Weights -0.035876 -0.18452 0.314834 0.865413
1 2 3 4 5
Weights -0.028485 -0.180302 0.312807 0.857073 -0.000049
1 2 3 4
Weights -0.047262 -0.002229 0.226202 0.70838
1 2 3 4 5
Weights 0.013664 -0.004724 0.223406 0.702286 0.065991
1 2 3 4
Weights 0.06832 -0.209011 0.25416 0.776763
1 2 3 4 5
Weights 0.078465 -0.208022 0.252743 0.774405 0.005234
1 2 3 4
Weights -0.113635 -0.131823 0.146742 0.998626
1 2 3 4 5
Weights -0.113043 -0.131753 0.146783 0.997559 0.000014
1 2 3 4
Weights -0.228127 -0.106717 0.231812 1.012897
1 2 3 4 5
Weights -0.228715 -0.106483 0.232049 1.012569 -1.672705e-07
1 2 3 4
Weights -0.035312 -0.445716 0.497934 0.925608
1 2 3 4 5
Weights -0.036185 -0.443327 0.498413 0.921723 -4.003673e-09
1 2 3 4
Weights -0.142696 0.205387 0.273841 0.485771
1 2 3 4 5
Weights -0.221546 0.211324 0.272911 0.482824 -0.094931
1 2 3 4
Weights -0.05705 -0.229491 0.407891 0.805639
1 2 3 4 5
Weights -0.05486 -0.228077 0.406301 0.80063 0.000009
1 2 3 4
Weights -0.19431 -0.187309 0.349044 0.965446
1 2 3 4 5
Weights -0.194244 -0.185635 0.348933 0.960569 -1.125158e-07
1 2 3 4
Weights -0.118332 -0.061904 0.073043 1.051207
1 2 3 4 5
Weights -0.117551 -0.060663 0.073649 1.044939 -0.000131
1 2 3 4
Weights 0.02993 -0.395992 0.469445 0.844986
1 2 3 4 5
Weights 0.026923 -0.386954 0.471549 0.83452 -5.347072e-07
1 2 3 4
Weights -0.106968 -0.078205 0.389156 0.667093
1 2 3 4 5
Weights -0.103061 -0.077729 0.387859 0.664033 0.002255
1 2 3 4
Weights -0.078667 0.038441 0.139088 0.796303
1 2 3 4 5
Weights -0.061441 0.038001 0.138643 0.795807 0.017018
1 2 3 4
Weights 0.056119 0.025977 0.18918 0.671473
1 2 3 4 5
Weights -0.014199 0.040462 0.190223 0.658052 -0.119519
1 2 3 4
Weights -0.046847 -0.174123 0.269078 0.860805
1 2 3 4 5
Weights -0.047713 -0.174156 0.269185 0.86047 -0.00004
1 2 3 4
Weights 0.052456 -0.173319 0.358314 0.687485
1 2 3 4 5
Weights 0.062658 -0.171302 0.356832 0.685638 0.004283
1 2 3 4
Weights -0.157725 0.068374 0.119818 0.880075
1 2 3 4 5
Weights -0.146118 0.067849 0.118909 0.878384 0.008756
1 2 3 4
Weights -0.015844 0.003139 0.041231 0.858716
1 2 3 4 5
Weights 0.052657 0.000501 0.038835 0.852898 0.079164
1 2 3 4
Weights -0.011016 -0.191651 0.379528 0.746708
1 2 3 4 5
Weights -0.009658 -0.191009 0.378672 0.744854 -0.000244
1 2 3 4
Weights -0.078963 -0.334799 0.335658 0.972491
1 2 3 4 5
Weights -0.080023 -0.334184 0.336141 0.971368 -4.512022e-08
1 2 3 4
Weights -0.134622 -0.24039 0.262206 1.015514
1 2 3 4 5
Weights -0.136193 -0.239415 0.263134 1.012878 9.899304e-08
1 2 3 4
Weights -0.001953 -0.096623 0.319345 0.715625
1 2 3 4 5
Weights 0.017614 -0.09403 0.316802 0.71031 0.01011
1 2 3 4
Weights -0.157859 -0.320383 0.430045 0.96115
1 2 3 4 5
Weights -0.158239 -0.320211 0.430375 0.960793 -1.685959e-09
Predicting t+2...
Fitting t+3...
1 2 3 4
Weights -0.255585 -0.01136 0.313564 0.818924
1 2 3 4 5
Weights -0.254538 -0.01159 0.313325 0.81769 0.000088
1 2 3 4
Weights -0.055682 0.053627 0.145081 0.731377
1 2 3 4 5
Weights -0.037798 0.052954 0.144364 0.730743 0.018338
1 2 3 4
Weights -0.086472 -0.290653 0.199221 1.072153
1 2 3 4 5
Weights -0.094466 -0.289343 0.200697 1.071838 -0.005524
1 2 3 4
Weights -0.074532 -0.256497 0.560001 0.652402
1 2 3 4 5
Weights -0.063476 -0.256628 0.559118 0.651717 0.009897
1 2 3 4
Weights 0.04503 -0.041891 0.158835 0.788967
1 2 3 4 5
Weights 0.027921 -0.039665 0.158463 0.788225 -0.029695
1 2 3 4
Weights -0.071104 -0.204754 0.120356 1.038711
1 2 3 4 5
Weights -0.093367 -0.202654 0.122466 1.038244 -0.018606
1 2 3 4
Weights -0.08688 -0.142759 0.415225 0.736634
1 2 3 4 5
Weights -0.080332 -0.14243 0.414505 0.735961 0.004165
1 2 3 4
Weights -0.185678 -0.066327 0.101592 1.049717
1 2 3 4 5
Weights -0.184824 -0.065915 0.101614 1.046919 0.000154
1 2 3 4
Weights -0.129815 -0.097025 0.078189 1.056087
1 2 3 4 5
Weights -0.13109 -0.096844 0.078422 1.056038 -0.000368
1 2 3 4
Weights -0.021637 -0.243946 -0.082888 1.25544
1 2 3 4 5
Weights -0.012772 -0.244325 -0.083187 1.255003 0.008755
1 2 3 4
Weights -0.158904 -0.222866 0.646848 0.591004
1 2 3 4 5
Weights -0.161359 -0.222991 0.647227 0.591085 -0.001553
1 2 3 4
Weights -0.041966 -0.098744 0.246228 0.797511
1 2 3 4 5
Weights -0.075278 -0.096766 0.247025 0.796713 -0.036777
1 2 3 4
Weights 0.302188 -0.254555 0.42137 0.454084
1 2 3 4 5
Weights 0.236161 -0.233529 0.428149 0.445412 -0.124926
1 2 3 4
Weights -0.033364 -0.239709 0.502055 0.69789
1 2 3 4 5
Weights -0.031041 -0.239702 0.501966 0.697846 0.00214
1 2 3 4
Weights 0.015932 -0.166423 0.240362 0.848702
1 2 3 4 5
Weights -0.018683 -0.164482 0.23904 0.847166 -0.046252
1 2 3 4
Weights -0.061307 -0.197824 0.167681 0.994309
1 2 3 4 5
Weights -0.068117 -0.197385 0.168258 0.994478 -0.00566
1 2 3 4
Weights -0.080071 -0.155193 0.237343 0.907938
1 2 3 4 5
Weights -0.084618 -0.155005 0.237795 0.907992 -0.00323
1 2 3 4
Weights -0.054032 0.094842 0.238382 0.609204
1 2 3 4 5
Weights -0.031654 0.094284 0.23759 0.608363 0.022169
1 2 3 4
Weights -0.136526 -0.028739 0.178496 0.911827
1 2 3 4 5
Weights -0.121816 -0.025423 0.17572 0.884336 0.00266
1 2 3 4
Weights 0.036465 -0.410082 0.254123 1.017688
1 2 3 4 5
Weights 0.081052 -0.403596 0.263172 1.007491 0.06902
1 2 3 4
Weights -0.138808 -0.140558 0.222161 0.9519
1 2 3 4 5
Weights -0.138663 -0.140553 0.222133 0.951884 0.000054
1 2 3 4
Weights -0.101423 -0.189763 0.264213 0.914141
1 2 3 4 5
Weights -0.113128 -0.188846 0.265773 0.913604 -0.008069
1 2 3 4
Weights -0.069466 -0.255923 0.13 1.076936
1 2 3 4 5
Weights -0.059072 -0.256885 0.128972 1.076227 0.008898
1 2 3 4
Weights -0.185295 -0.016679 -0.074843 1.191167
1 2 3 4 5
Weights -0.18688 -0.016139 -0.074322 1.190836 -0.000025
1 2 3 4
Weights 0.009591 -0.300259 0.464256 0.756829
1 2 3 4 5
Weights -0.004005 -0.299672 0.463668 0.756779 -0.01604
1 2 3 4
Weights -0.004418 -0.162062 0.325373 0.773807
1 2 3 4 5
Weights -0.082989 -0.154336 0.32164 0.763812 -0.101726
1 2 3 4
Weights -0.253484 0.068496 0.305645 0.809106
1 2 3 4 5
Weights -0.250437 0.06887 0.304449 0.805815 0.000033
1 2 3 4
Weights 0.051855 -0.234672 0.396711 0.712681
1 2 3 4 5
Weights 0.028762 -0.23112 0.394305 0.711858 -0.0366
1 2 3 4
Weights -0.06007 -0.049043 0.263158 0.724431
1 2 3 4 5
Weights -0.043708 -0.049558 0.262435 0.72373 0.015945
1 2 3 4
Weights 0.121114 -0.398591 0.344617 0.847636
1 2 3 4 5
Weights 0.085885 -0.394893 0.334176 0.842037 -0.071604
1 2 3 4
Weights -0.085934 -0.380161 0.471132 0.929467
1 2 3 4 5
Weights -0.092831 -0.379436 0.473272 0.928592 -0.00283
1 2 3 4
Weights -0.022663 -0.364238 0.241989 1.040159
1 2 3 4 5
Weights 0.004054 -0.364806 0.241367 1.038396 0.027713
1 2 3 4
Weights -0.100184 -0.107912 0.18664 0.925849
1 2 3 4 5
Weights -0.100727 -0.107886 0.186703 0.925873 -0.000329
1 2 3 4
Weights -0.05212 -0.216472 0.344732 0.843072
1 2 3 4 5
Weights -0.014515 -0.216209 0.341441 0.839856 0.029614
1 2 3 4
Weights 0.170159 0.113226 -0.024498 0.634672
1 2 3 4 5
Weights 0.044346 0.116849 -0.019779 0.616879 -0.194594
1 2 3 4
Weights -0.030164 -0.123085 0.301298 0.791196
1 2 3 4 5
Weights 0.007272 -0.124206 0.299546 0.787302 0.03862
1 2 3 4
Weights -0.051505 0.010284 0.142652 0.797204
1 2 3 4 5
Weights -0.065922 0.01068 0.143073 0.797333 -0.014047
1 2 3 4
Weights -0.095866 -0.111411 0.232374 0.843406
1 2 3 4 5
Weights -0.078177 -0.112414 0.231036 0.841419 0.017355
1 2 3 4
Weights 0.039805 -0.386263 0.324459 0.959719
1 2 3 4 5
Weights 0.04361 -0.386056 0.32578 0.95928 0.007794
1 2 3 4
Weights -0.035947 -0.371006 0.345183 0.990275
1 2 3 4 5
Weights -0.015883 -0.371595 0.343141 0.988714 0.016712
1 2 3 4
Weights -0.000588 0.001096 0.316104 0.615533
1 2 3 4 5
Weights 0.019913 -0.00032 0.315672 0.61489 0.023959
1 2 3 4
Weights 0.117621 0.113051 0.004942 0.647582
1 2 3 4 5
Weights -0.007378 0.122187 0.016666 0.650617 -0.158849
1 2 3 4
Weights -0.139885 0.094827 0.219763 0.740501
1 2 3 4 5
Weights -0.12928 0.095987 0.218765 0.736905 0.003535
1 2 3 4
Weights -0.150142 -0.012396 0.204563 0.840102
1 2 3 4 5
Weights -0.144413 -0.012207 0.203762 0.837721 0.003137
1 2 3 4
Weights -0.131747 -0.110322 0.28954 0.867265
1 2 3 4 5
Weights -0.131372 -0.110305 0.28947 0.867223 0.000109
1 2 3 4
Weights 0.055366 -0.559806 0.343571 1.066679
1 2 3 4 5
Weights 0.086926 -0.54227 0.36408 1.047501 0.067073
1 2 3 4
Weights -0.04913 -0.06709 0.20988 0.805025
1 2 3 4 5
Weights -0.060587 -0.066813 0.210231 0.805144 -0.011107
1 2 3 4
Weights -0.064604 -0.19958 0.154244 1.043617
1 2 3 4 5
Weights -0.061094 -0.199829 0.153848 1.04335 0.002267
1 2 3 4
Weights -0.172486 -0.207997 0.384411 0.899286
1 2 3 4 5
Weights -0.173237 -0.208003 0.384687 0.899216 -0.000072
1 2 3 4
Weights 0.280663 0.076554 0.075619 0.503552
1 2 3 4 5
Weights 0.185776 0.097435 0.084831 0.484193 -0.199199
1 2 3 4
Weights -0.023608 -0.217333 0.123052 1.039497
1 2 3 4 5
Weights 0.003831 -0.218 0.122114 1.038239 0.025895
1 2 3 4
Weights -0.060199 -0.109318 0.389158 0.671495
1 2 3 4 5
Weights -0.029484 -0.109766 0.387574 0.669479 0.029013
1 2 3 4
Weights -0.103878 -0.049245 0.226935 0.853896
1 2 3 4 5
Weights -0.083065 -0.04702 0.223181 0.837355 0.008092
1 2 3 4
Weights 0.064955 -0.32729 0.273072 0.924667
1 2 3 4 5
Weights 0.064875 -0.327294 0.273048 0.924668 -0.000154
1 2 3 4
Weights -0.217135 0.108121 0.38163 0.611587
1 2 3 4 5
Weights -0.211599 0.10942 0.377806 0.600651 0.001381
1 2 3 4
Weights 0.046668 0.254702 -0.008694 0.567613
1 2 3 4 5
Weights -0.173002 0.259773 -0.004937 0.554803 -0.250537
1 2 3 4
Weights -0.144928 -0.148965 0.15578 1.064264
1 2 3 4 5
Weights -0.144765 -0.148989 0.155732 1.064243 0.000014
1 2 3 4
Weights 0.102255 -0.094667 0.168106 0.722174
1 2 3 4 5
Weights 0.040588 -0.090241 0.166981 0.71866 -0.086973
1 2 3 4
Weights 0.007507 -0.309365 0.068561 1.122165
1 2 3 4 5
Weights 0.038799 -0.308395 0.069454 1.120786 0.035145
1 2 3 4
Weights -0.056488 -0.093323 0.301315 0.759553
1 2 3 4 5
Weights -0.022917 -0.093048 0.299461 0.756933 0.028922
1 2 3 4
Weights -0.088228 -0.108627 0.095778 1.018565
1 2 3 4 5
Weights -0.084245 -0.10893 0.095333 1.018084 0.00231
1 2 3 4
Weights 0.118567 0.115323 0.059806 0.608699
1 2 3 4 5
Weights 0.001215 0.125229 0.063123 0.594381 -0.166568
1 2 3 4
Weights 0.202978 -0.116824 0.268232 0.583766
1 2 3 4 5
Weights 0.128283 -0.089648 0.262969 0.561207 -0.165475
1 2 3 4
Weights -0.207456 0.030362 0.349792 0.726016
1 2 3 4 5
Weights -0.201749 0.032138 0.346917 0.714863 0.000672
1 2 3 4
Weights 0.107526 0.034438 -0.100424 0.865525
1 2 3 4 5
Weights 0.063479 0.034629 -0.098063 0.865652 -0.061017
1 2 3 4
Weights -0.06008 -0.23499 0.064896 1.127952
1 2 3 4 5
Weights -0.058681 -0.235137 0.064758 1.127898 0.001146
1 2 3 4
Weights -0.214086 -0.064483 0.353301 0.786902
1 2 3 4 5
Weights -0.212501 -0.064304 0.352896 0.785177 0.000563
1 2 3 4
Weights -0.11463 -0.124054 -0.082009 1.240807
1 2 3 4 5
Weights -0.119573 -0.12223 -0.080368 1.239204 -0.000883
1 2 3 4
Weights 0.051376 0.014064 0.042212 0.812644
1 2 3 4 5
Weights -0.012764 0.016102 0.042729 0.807503 -0.090017
1 2 3 4
Weights -0.014391 -0.274008 0.012119 1.158152
1 2 3 4 5
Weights 0.070621 -0.273803 0.012465 1.151044 0.088651
1 2 3 4
Weights -0.148375 0.06181 0.358565 0.615264
1 2 3 4 5
Weights -0.14662 0.062013 0.358455 0.615052 0.000948
1 2 3 4
Weights -0.194388 -0.182617 0.506295 0.773552
1 2 3 4 5
Weights -0.19436 -0.182614 0.506285 0.773548 0.000002
1 2 3 4
Weights -0.099093 -0.024259 -0.100024 1.13517
1 2 3 4 5
Weights -0.105609 -0.023233 -0.099337 1.134722 -0.003182
1 2 3 4
Weights -0.140298 -0.083588 0.348142 0.721826
1 2 3 4 5
Weights -0.135511 -0.083624 0.347651 0.721424 0.003769
1 2 3 4
Weights 0.000159 0.000057 0.321995 0.602528
1 2 3 4 5
Weights 0.054852 -0.004346 0.319547 0.598569 0.064712
1 2 3 4
Weights 0.313583 0.032354 0.036196 0.393361
1 2 3 4 5
Weights 0.136403 0.046016 0.040491 0.386956 -0.240626
1 2 3 4
Weights -0.033068 -0.340202 0.600944 0.706937
1 2 3 4 5
Weights 0.004582 -0.339507 0.597841 0.704225 0.033015
1 2 3 4
Weights -0.231421 -0.092669 0.371942 0.87992
1 2 3 4 5
Weights -0.232455 -0.089111 0.371688 0.865684 0.00002
1 2 3 4
Weights -0.170143 -0.145339 0.233367 0.976749
1 2 3 4 5
Weights -0.168677 -0.14491 0.23308 0.974771 0.000337
1 2 3 4
Weights -0.010278 -0.010987 0.273435 0.612782
1 2 3 4 5
Weights -0.041391 -0.009975 0.273758 0.612846 -0.033344
1 2 3 4
Weights -0.156512 -0.09238 0.328982 0.820704
1 2 3 4 5
Weights -0.153399 -0.09205 0.32828 0.819606 0.000879
1 2 3 4
Weights 0.004969 -0.458166 0.406672 0.938217
1 2 3 4 5
Weights 0.058488 -0.457064 0.409246 0.934141 0.061817
1 2 3 4
Weights -0.08685 -0.041725 -0.073327 1.100163
1 2 3 4 5
Weights -0.086059 -0.041804 -0.07338 1.100126 0.000611
1 2 3 4
Weights 0.082665 -0.067993 -0.019753 0.882192
1 2 3 4 5
Weights -0.020729 -0.066508 -0.017789 0.873684 -0.137447
1 2 3 4
Weights -0.173816 -0.125638 0.145103 1.053924
1 2 3 4 5
Weights -0.172577 -0.125406 0.144995 1.052491 0.000149
1 2 3 4
Weights -0.034862 -0.347491 0.267008 0.990461
1 2 3 4 5
Weights 0.01756 -0.348844 0.264646 0.986969 0.050837
1 2 3 4
Weights -0.202948 -0.05138 0.256593 0.906465
1 2 3 4 5
Weights -0.201155 -0.049911 0.255683 0.899936 0.000139
1 2 3 4
Weights -0.111722 -0.078554 0.276436 0.806268
1 2 3 4 5
Weights -0.117522 -0.078612 0.27692 0.8063 -0.003695
1 2 3 4
Weights -0.127425 -0.2547 0.324727 0.949023
1 2 3 4 5
Weights -0.132488 -0.254063 0.326097 0.948316 -0.001966
1 2 3 4
Weights -0.036441 -0.147237 0.376229 0.700271
1 2 3 4 5
Weights -0.024148 -0.14744 0.375913 0.699947 0.012194
1 2 3 4
Weights 0.001857 -0.369751 0.133213 1.143233
1 2 3 4 5
Weights 0.021426 -0.368728 0.134188 1.141427 0.023803
1 2 3 4
Weights 0.010257 -0.499282 0.366416 1.01717
1 2 3 4 5
Weights 0.089276 -0.493209 0.373336 1.005227 0.096273
1 2 3 4
Weights -0.174195 -0.109095 0.2964 0.859678
1 2 3 4 5
Weights -0.172388 -0.108886 0.295924 0.858719 0.000813
1 2 3 4
Weights -0.05009 -0.178747 0.314264 0.865257
1 2 3 4 5
Weights 0.014351 -0.174906 0.307117 0.848776 0.045996
1 2 3 4
Weights -0.068862 -0.284481 0.388201 0.842867
1 2 3 4 5
Weights -0.08148 -0.283843 0.389269 0.84296 -0.011142
1 2 3 4
Weights -0.047376 -0.222041 0.304954 0.869161
1 2 3 4 5
Weights -0.065384 -0.221196 0.305971 0.869018 -0.01693
1 2 3 4
Weights -0.087775 -0.256047 0.165117 1.068152
1 2 3 4 5
Weights -0.094704 -0.255039 0.166162 1.068043 -0.005266
1 2 3 4
Weights -0.146824 -0.275863 0.269877 1.048894
1 2 3 4 5
Weights -0.14696 -0.275833 0.269924 1.048897 -0.000021
1 2 3 4
Weights -0.060379 -0.281576 0.46896 0.789163
1 2 3 4 5
Weights -0.066497 -0.281477 0.46961 0.789246 -0.004843
1 2 3 4
Weights -0.027073 -0.477138 0.552234 0.857717
1 2 3 4 5
Weights 0.004386 -0.47764 0.550308 0.855959 0.030149
1 2 3 4
Weights -0.031205 -0.31744 0.479283 0.786598
1 2 3 4 5
Weights -0.006826 -0.317792 0.478096 0.785263 0.023548
1 2 3 4
Weights 0.019677 -0.192076 0.465378 0.644047
1 2 3 4 5
Weights 0.02916 -0.193263 0.465644 0.643628 0.012987
1 2 3 4
Weights -0.175035 -0.047698 0.165238 0.949378
1 2 3 4 5
Weights -0.176274 -0.047629 0.165407 0.949422 -0.000196
1 2 3 4
Weights -0.013304 -0.260061 0.275381 0.953007
1 2 3 4 5
Weights 0.082557 -0.258476 0.272569 0.937733 0.094509
1 2 3 4
Weights -0.060145 -0.017365 0.121062 0.834593
1 2 3 4 5
Weights -0.045151 -0.018218 0.120296 0.833681 0.016026
1 2 3 4
Weights -0.11589 -0.02772 0.070577 0.980401
1 2 3 4 5
Weights -0.119636 -0.027412 0.070891 0.980239 -0.001705
1 2 3 4
Weights -0.137855 0.019047 0.257351 0.773571
1 2 3 4 5
Weights -0.127878 0.019941 0.255708 0.76917 0.004989
1 2 3 4
Weights -0.14242 -0.224934 0.231638 1.035725
1 2 3 4 5
Weights -0.142818 -0.223228 0.232642 1.030498 0.000915
1 2 3 4
Weights -0.096865 -0.089163 0.420466 0.653252
1 2 3 4 5
Weights -0.105733 -0.089225 0.421043 0.653294 -0.007076
1 2 3 4
Weights -0.216429 -0.066451 0.32581 0.825182
1 2 3 4 5
Weights -0.216823 -0.066497 0.325903 0.825196 -0.000085
1 2 3 4
Weights -0.127089 0.076313 0.103513 0.835885
1 2 3 4 5
Weights -0.122912 0.076286 0.103312 0.83536 0.00256
1 2 3 4
Weights -0.011454 -0.396302 0.542473 0.810253
1 2 3 4 5
Weights 0.057777 -0.395346 0.54036 0.805092 0.068301
1 2 3 4
Weights -0.02051 -0.120254 0.443025 0.604001
1 2 3 4 5
Weights 0.007337 -0.121588 0.442501 0.602932 0.030458
1 2 3 4
Weights -0.0634 -0.119702 0.168568 0.880941
1 2 3 4 5
Weights -0.05177 -0.120178 0.168074 0.880507 0.010839
1 2 3 4
Weights -0.207299 0.008564 0.188248 0.930583
1 2 3 4 5
Weights -0.202524 0.009267 0.187104 0.924209 0.000094
1 2 3 4
Weights -0.08164 -0.116078 0.343197 0.786344
1 2 3 4 5
Weights -0.063506 -0.11508 0.34104 0.783339 0.011491
1 2 3 4
Weights -0.075242 -0.131763 0.221482 0.890717
1 2 3 4 5
Weights -0.080455 -0.13154 0.221892 0.890822 -0.004244
1 2 3 4
Weights -0.068986 -0.028425 0.320641 0.627344
1 2 3 4 5
Weights -0.126953 -0.027742 0.321841 0.626807 -0.055206
1 2 3 4
Weights -0.060834 -0.215924 0.224918 0.916893
1 2 3 4 5
Weights -0.057471 -0.216076 0.224733 0.916782 0.003144
1 2 3 4
Weights 0.091637 -0.310991 0.271896 0.852402
1 2 3 4 5
Weights 0.053739 -0.310818 0.26496 0.849282 -0.061885
1 2 3 4
Weights -0.210401 0.005046 0.215832 0.866152
1 2 3 4 5
Weights -0.209976 0.005006 0.215779 0.866065 0.000075
1 2 3 4
Weights -0.017376 -0.19665 0.259883 0.842372
1 2 3 4 5
Weights -0.024863 -0.196436 0.259952 0.842488 -0.008007
1 2 3 4
Weights -0.054596 -0.365346 0.436941 0.896725
1 2 3 4 5
Weights -0.051746 -0.365452 0.436573 0.896601 0.002285
1 2 3 4
Weights 0.080526 0.15308 0.030096 0.690539
1 2 3 4 5
Weights -0.011599 0.165246 0.037297 0.644382 -0.188107
1 2 3 4
Weights -0.114153 -0.132032 0.283141 0.876998
1 2 3 4 5
Weights -0.115166 -0.132016 0.283313 0.877026 -0.000399
1 2 3 4
Weights -0.054835 -0.059695 0.227531 0.839687
1 2 3 4 5
Weights 0.045156 -0.058013 0.217346 0.810132 0.079007
1 2 3 4
Weights -0.07124 -0.243637 0.520031 0.724902
1 2 3 4 5
Weights -0.053692 -0.242353 0.517417 0.7225 0.011908
1 2 3 4
Weights -0.054432 -0.368236 0.621986 0.736435
1 2 3 4 5
Weights -0.035273 -0.367142 0.618729 0.73431 0.013841
1 2 3 4
Weights -0.018126 -0.327098 0.336516 0.909628
1 2 3 4 5
Weights -0.019194 -0.327069 0.336529 0.909656 -0.001132
1 2 3 4
Weights 0.193591 0.182285 0.049013 0.493012
1 2 3 4 5
Weights 0.096996 0.193655 0.059523 0.483566 -0.151195
1 2 3 4
Weights -0.144758 -0.147211 0.23333 0.955593
1 2 3 4 5
Weights -0.141811 -0.146701 0.232875 0.952149 0.001453
1 2 3 4
Weights -0.067341 -0.261541 0.271173 0.983236
1 2 3 4 5
Weights -0.072079 -0.261172 0.271953 0.983293 -0.003072
1 2 3 4
Weights -0.16143 0.020023 0.100289 0.952162
1 2 3 4 5
Weights -0.158052 0.019899 0.099982 0.950368 0.000451
1 2 3 4
Weights 0.097577 -0.406603 0.420521 0.816355
1 2 3 4 5
Weights 0.082255 -0.404445 0.414695 0.815562 -0.034042
1 2 3 4
Weights -0.129531 -0.047099 0.209134 0.929257
1 2 3 4 5
Weights -0.11695 -0.045575 0.206197 0.919727 0.000586
1 2 3 4
Weights -0.103135 -0.349689 0.353397 0.982907
1 2 3 4 5
Weights -0.108345 -0.348842 0.354502 0.982727 -0.003649
1 2 3 4
Weights -0.024491 -0.172015 0.233097 0.917904
1 2 3 4 5
Weights 0.092466 -0.170151 0.227022 0.894886 0.105803
1 2 3 4
Weights 0.024492 -0.06428 0.278425 0.669044
1 2 3 4 5
Weights 0.064438 -0.06802 0.277482 0.666283 0.051561
1 2 3 4
Weights 0.015033 -0.168414 0.214371 0.868737
1 2 3 4 5
Weights 0.00376 -0.168163 0.21403 0.868745 -0.014312
1 2 3 4
Weights -0.118449 -0.236705 0.163722 1.085047
1 2 3 4 5
Weights -0.118815 -0.236638 0.163805 1.085054 -0.000179
1 2 3 4
Weights -0.13618 -0.043511 0.238105 0.868153
1 2 3 4 5
Weights -0.123957 -0.042145 0.235293 0.858184 0.001935
1 2 3 4
Weights -0.123087 -0.172625 0.298519 0.880914
1 2 3 4 5
Weights -0.122233 -0.17267 0.298386 0.880852 0.000536
1 2 3 4
Weights -0.135197 -0.166029 0.241105 0.954836
1 2 3 4 5
Weights -0.140356 -0.165439 0.242145 0.953238 -0.002244
1 2 3 4
Weights 0.005223 -0.168892 0.367337 0.683269
1 2 3 4 5
Weights 0.053097 -0.170903 0.367343 0.680875 0.05544
1 2 3 4
Weights -0.093757 -0.061781 0.219476 0.841735
1 2 3 4 5
Weights -0.089026 -0.061756 0.219079 0.841269 0.003228
1 2 3 4
Weights -0.0473 -0.257459 0.333606 0.907321
1 2 3 4 5
Weights -0.040035 -0.257543 0.332865 0.906888 0.005526
1 2 3 4
Weights -0.18871 0.051217 -0.058647 1.107192
1 2 3 4 5
Weights -0.18591 0.050963 -0.058304 1.102691 0.000114
1 2 3 4
Weights -0.012666 -0.372996 0.494787 0.807934
1 2 3 4 5
Weights 0.003716 -0.373385 0.494702 0.807385 0.017407
1 2 3 4
Weights -0.198087 -0.100607 0.285524 0.922427
1 2 3 4 5
Weights -0.198514 -0.09822 0.285754 0.912953 0.000122
1 2 3 4
Weights -0.037168 -0.240259 0.19016 0.998595
1 2 3 4 5
Weights -0.049924 -0.239649 0.190839 0.998706 -0.012085
1 2 3 4
Weights 0.10371 -0.262809 0.371145 0.678856
1 2 3 4 5
Weights 0.07404 -0.260846 0.367947 0.678212 -0.043619
1 2 3 4
Weights -0.15825 -0.076183 0.298341 0.855888
1 2 3 4 5
Weights -0.15073 -0.071639 0.294244 0.829727 0.001214
1 2 3 4
Weights -0.17039 -0.017288 0.305664 0.785368
1 2 3 4 5
Weights -0.165298 -0.016148 0.303794 0.777488 0.001119
1 2 3 4
Weights -0.032295 -0.09653 0.340541 0.677644
1 2 3 4 5
Weights 0.071271 -0.099484 0.336545 0.667456 0.110485
1 2 3 4
Weights -0.012808 -0.052476 0.28308 0.717141
1 2 3 4 5
Weights 0.039943 -0.054003 0.28194 0.714616 0.056383
1 2 3 4
Weights -0.128861 -0.215211 0.118351 1.113698
1 2 3 4 5
Weights -0.131932 -0.21435 0.119189 1.113197 -0.001163
1 2 3 4
Weights -0.098565 -0.06401 0.12425 0.919392
1 2 3 4 5
Weights -0.104615 -0.063687 0.124656 0.919477 -0.004701
1 2 3 4
Weights -0.14249 0.102455 0.066075 0.912533
1 2 3 4 5
Weights -0.127911 0.102972 0.065035 0.900003 0.001488
1 2 3 4
Weights 0.065655 -0.285342 0.304309 0.865765
1 2 3 4 5
Weights 0.036992 -0.280749 0.294253 0.857608 -0.06548
1 2 3 4
Weights -0.073317 -0.280684 0.32631 0.940181
1 2 3 4 5
Weights -0.067995 -0.280915 0.325607 0.939716 0.004053
1 2 3 4
Weights -0.142418 0.038219 0.085639 0.943043
1 2 3 4 5
Weights -0.136923 0.038128 0.085217 0.939951 0.001041
1 2 3 4
Weights -0.207308 -0.041345 0.335196 0.814874
1 2 3 4 5
Weights -0.206102 -0.041101 0.33483 0.814019 0.000118
1 2 3 4
Weights -0.028374 -0.226345 0.491865 0.692961
1 2 3 4 5
Weights 0.082603 -0.223564 0.485392 0.68297 0.101518
1 2 3 4
Weights -0.080706 -0.158375 0.200527 0.960518
1 2 3 4 5
Weights -0.066044 -0.158364 0.19912 0.95748 0.010844
1 2 3 4
Weights -0.016394 -0.124353 0.319456 0.769229
1 2 3 4 5
Weights -0.014104 -0.12446 0.319427 0.769173 0.002608
1 2 3 4
Weights -0.133893 0.020416 0.360506 0.653255
1 2 3 4 5
Weights -0.11906 0.022398 0.358198 0.647985 0.006922
1 2 3 4
Weights -0.140065 -0.140432 0.265255 0.950562
1 2 3 4 5
Weights -0.138576 -0.13969 0.264866 0.947367 -0.000648
1 2 3 4
Weights -0.104139 -0.107252 0.229987 0.880197
1 2 3 4 5
Weights -0.105717 -0.107165 0.230148 0.880219 -0.001167
1 2 3 4
Weights -0.195781 -0.069816 0.216669 0.94942
1 2 3 4 5
Weights -0.194798 -0.069743 0.216465 0.94872 0.00008
1 2 3 4
Weights -0.214544 -0.128757 0.256784 0.997251
1 2 3 4 5
Weights -0.214745 -0.128734 0.256861 0.997271 -0.000003
1 2 3 4
Weights -0.16037 -0.11492 0.153685 1.023577
1 2 3 4 5
Weights -0.159424 -0.114912 0.153676 1.022938 0.000191
1 2 3 4
Weights -0.221777 -0.163819 0.326647 0.960096
1 2 3 4 5
Weights -0.222827 -0.162242 0.32726 0.955227 0.000028
1 2 3 4
Weights -0.089778 -0.135533 0.201065 0.905062
1 2 3 4 5
Weights -0.101224 -0.134837 0.202065 0.905145 -0.009345
1 2 3 4
Weights -0.125584 -0.181293 0.013866 1.213356
1 2 3 4 5
Weights -0.129292 -0.179759 0.015374 1.212114 -0.000266
1 2 3 4
Weights -0.019365 -0.11979 0.245406 0.80118
1 2 3 4 5
Weights 0.050087 -0.121084 0.24376 0.796191 0.073445
1 2 3 4
Weights -0.163535 -0.143294 0.282095 0.937828
1 2 3 4 5
Weights -0.163951 -0.143297 0.282232 0.937834 -0.000046
1 2 3 4
Weights -0.0407 -0.240993 0.326819 0.900722
1 2 3 4 5
Weights -0.048344 -0.240846 0.327471 0.900631 -0.005678
1 2 3 4
Weights -0.202487 -0.193891 0.381398 0.901241
1 2 3 4 5
Weights -0.201735 -0.193553 0.381114 0.900115 0.000092
1 2 3 4
Weights -0.062713 0.278859 -0.538007 1.160583
1 2 3 4 5
Weights 0.027866 0.274094 -0.537312 1.154719 0.087107
1 2 3 4
Weights -0.208089 -0.135435 0.45353 0.748619
1 2 3 4 5
Weights -0.210793 -0.135338 0.454092 0.74807 -0.000615
1 2 3 4
Weights 0.099728 -0.37421 0.63946 0.561694
1 2 3 4 5
Weights 0.046959 -0.361512 0.625001 0.553309 -0.091332
1 2 3 4
Weights -0.147911 -0.223315 0.328781 0.924289
1 2 3 4 5
Weights -0.1464 -0.223385 0.328476 0.923893 0.000711
1 2 3 4
Weights -0.064857 -0.057518 0.321597 0.737255
1 2 3 4 5
Weights -0.014162 -0.055864 0.317103 0.727068 0.040054
1 2 3 4
Weights 0.16209 0.031063 -0.004958 0.53419
1 2 3 4 5
Weights 0.27137 0.027862 -0.007888 0.530619 0.127911
1 2 3 4
Weights -0.007037 -0.284018 0.384895 0.79564
1 2 3 4 5
Weights -0.012118 -0.28387 0.384871 0.795721 -0.00559
1 2 3 4
Weights -0.048177 -0.322875 0.104892 1.212352
1 2 3 4 5
Weights -0.047533 -0.322997 0.104751 1.212329 0.000325
1 2 3 4
Weights -0.107267 -0.119918 0.258366 0.878488
1 2 3 4 5
Weights -0.107759 -0.119916 0.258431 0.87852 -0.000269
1 2 3 4
Weights -0.037997 -0.35202 0.466327 0.823829
1 2 3 4 5
Weights -0.048321 -0.351754 0.466933 0.823956 -0.009531
1 2 3 4
Weights -0.030031 -0.004559 0.05903 0.874914
1 2 3 4 5
Weights -0.083364 -0.00299 0.06035 0.874123 -0.055145
1 2 3 4
Weights -0.228435 -0.063043 0.233921 0.967644
1 2 3 4 5
Weights -0.22835 -0.063054 0.233893 0.967625 0.000001
1 2 3 4
Weights -0.117999 -0.124718 0.246435 0.950663
1 2 3 4 5
Weights -0.112194 -0.124334 0.245177 0.946681 0.000795
1 2 3 4
Weights -0.093605 -0.006906 0.102649 0.876906
1 2 3 4 5
Weights -0.10147 -0.006378 0.103179 0.877025 -0.00727
1 2 3 4
Weights -0.129545 -0.023044 0.18386 0.844481
1 2 3 4 5
Weights -0.121358 -0.023368 0.183014 0.843123 0.007093
1 2 3 4
Weights -0.092807 -0.131776 0.282139 0.89545
1 2 3 4 5
Weights -0.092123 -0.131801 0.282045 0.895328 0.000606
1 2 3 4
Weights -0.009498 -0.311705 0.412706 0.824386
1 2 3 4 5
Weights 0.025542 -0.312563 0.412664 0.822252 0.038839
1 2 3 4
Weights -0.128412 -0.118041 0.323202 0.825309
1 2 3 4 5
Weights -0.121346 -0.11755 0.321856 0.822446 0.003289
1 2 3 4
Weights -0.161916 -0.163053 0.330386 0.87085
1 2 3 4 5
Weights -0.163377 -0.162958 0.330752 0.870754 -0.000568
1 2 3 4
Weights 0.010511 0.052463 0.053912 0.704616
1 2 3 4 5
Weights 0.071465 0.049562 0.051951 0.701854 0.073195
1 2 3 4
Weights -0.126994 -0.19504 0.219551 1.019476
1 2 3 4 5
Weights -0.126544 -0.195093 0.219437 1.019419 0.0001
1 2 3 4
Weights -0.115298 -0.20497 0.425823 0.816829
1 2 3 4 5
Weights -0.118532 -0.204895 0.426465 0.816546 -0.001536
1 2 3 4
Weights 0.002651 -0.348249 0.358977 0.939376
1 2 3 4 5
Weights 0.037975 -0.348237 0.360517 0.937565 0.041082
1 2 3 4
Weights -0.108273 -0.129958 0.033109 1.106449
1 2 3 4 5
Weights -0.110591 -0.129553 0.033484 1.106395 -0.001147
1 2 3 4
Weights -0.044944 -0.26287 0.476066 0.758691
1 2 3 4 5
Weights 0.005175 -0.261459 0.471242 0.753527 0.041423
1 2 3 4
Weights -0.013013 -0.317016 0.426181 0.842591
1 2 3 4 5
Weights 0.018159 -0.317199 0.425395 0.841536 0.030737
1 2 3 4
Weights -0.159968 -0.172638 0.334498 0.881469
1 2 3 4 5
Weights -0.15837 -0.172579 0.334126 0.881053 0.000568
1 2 3 4
Weights -0.037341 0.013818 0.223824 0.635338
1 2 3 4 5
Weights -0.167721 0.016914 0.224757 0.631764 -0.134989
1 2 3 4
Weights 0.100711 -0.191716 0.329805 0.692442
1 2 3 4 5
Weights 0.038166 -0.178226 0.318446 0.676698 -0.118909
1 2 3 4
Weights -0.208243 -0.148594 0.549474 0.67443
1 2 3 4 5
Weights -0.209116 -0.148741 0.549813 0.67444 -0.000194
1 2 3 4
Weights -0.141898 -0.110924 0.232945 0.927262
1 2 3 4 5
Weights -0.13799 -0.109969 0.232315 0.921057 0.001642
1 2 3 4
Weights 0.381675 0.062528 0.063654 0.421444
1 2 3 4 5
Weights 0.29245 0.085651 0.078005 0.409657 -0.203269
1 2 3 4
Weights 0.029428 -0.45095 0.512332 0.853102
1 2 3 4 5
Weights 0.008523 -0.449933 0.507501 0.85203 -0.031085
1 2 3 4
Weights -0.032708 -0.262768 0.405787 0.806008
1 2 3 4 5
Weights -0.032454 -0.262773 0.405775 0.806001 0.00024
1 2 3 4
Weights -0.083386 -0.227836 0.200279 1.03655
1 2 3 4 5
Weights -0.093363 -0.22642 0.202266 1.035364 -0.004473
1 2 3 4
Weights 0.015592 -0.129297 0.162262 0.860778
1 2 3 4 5
Weights 0.021478 -0.129493 0.16234 0.860588 0.007624
1 2 3 4
Weights 0.025628 -0.33195 0.415192 0.805635
1 2 3 4 5
Weights 0.014051 -0.33112 0.413862 0.80568 -0.016829
1 2 3 4
Weights 0.053593 0.12952 0.066834 0.606904
1 2 3 4 5
Weights 0.00328 0.131436 0.067874 0.606483 -0.057774
1 2 3 4
Weights -0.035522 -0.112468 0.319545 0.772909
1 2 3 4 5
Weights 0.106602 -0.10992 0.309574 0.745418 0.128015
1 2 3 4
Weights -0.090998 -0.277814 0.439076 0.858399
1 2 3 4 5
Weights -0.090739 -0.277815 0.439017 0.85838 0.000123
1 2 3 4
Weights -0.178236 -0.155317 0.165772 1.076691
1 2 3 4 5
Weights -0.179251 -0.15504 0.166103 1.076586 -0.000028
1 2 3 4
Weights -0.03619 -0.406361 0.481499 0.863265
1 2 3 4 5
Weights -0.063918 -0.404709 0.483187 0.863158 -0.026878
1 2 3 4
Weights -0.133475 -0.06212 0.189467 0.9403
1 2 3 4 5
Weights -0.134126 -0.061759 0.189512 0.939015 -0.001208
1 2 3 4
Weights -0.172166 -0.05671 0.170552 0.962851
1 2 3 4 5
Weights -0.171542 -0.055442 0.170764 0.95381 0.000524
1 2 3 4
Weights -0.00556 -0.395592 0.505087 0.830839
1 2 3 4 5
Weights 0.090929 -0.394688 0.504578 0.824433 0.09953
1 2 3 4
Weights -0.000324 0.01582 0.15117 0.694887
1 2 3 4 5
Weights 0.108891 0.010569 0.147188 0.686294 0.124863
1 2 3 4
Weights 0.128607 -0.114295 0.245675 0.676194
1 2 3 4 5
Weights 0.057336 -0.097893 0.239925 0.657828 -0.136853
1 2 3 4
Weights -0.137674 -0.072332 0.140423 0.960372
1 2 3 4 5
Weights -0.134206 -0.072493 0.139916 0.959258 0.001798
1 2 3 4
Weights 0.024927 -0.328772 0.376807 0.868938
1 2 3 4 5
Weights 0.009687 -0.328452 0.375032 0.868589 -0.02051
1 2 3 4
Weights -0.185035 -0.036549 0.217049 0.916408
1 2 3 4 5
Weights -0.181155 -0.036305 0.216141 0.911942 0.000273
1 2 3 4
Weights -0.243279 -0.045304 0.310472 0.889702
1 2 3 4 5
Weights -0.243465 -0.045292 0.310528 0.889728 -0.000002
1 2 3 4
Weights -0.022376 -0.132982 0.280192 0.818175
1 2 3 4 5
Weights -0.034049 -0.1327 0.280382 0.818207 -0.01186
1 2 3 4
Weights -0.182001 -0.180653 0.320769 0.925469
1 2 3 4 5
Weights -0.181085 -0.180614 0.320453 0.925071 0.000154
1 2 3 4
Weights -0.069672 -0.105255 0.324256 0.781141
1 2 3 4 5
Weights -0.06335 -0.105051 0.323725 0.780753 0.004223
1 2 3 4
Weights -0.016895 -0.23254 0.153256 0.958345
1 2 3 4 5
Weights 0.065064 -0.233209 0.152441 0.954942 0.084021
1 2 3 4
Weights 0.022481 -0.131577 0.21244 0.760523
1 2 3 4 5
Weights 0.027466 -0.131723 0.21248 0.760436 0.005875
1 2 3 4
Weights -0.248839 -0.142512 0.462355 0.841014
1 2 3 4 5
Weights -0.247603 -0.142029 0.461839 0.839817 0.000009
1 2 3 4
Weights 0.004716 -0.273489 0.594216 0.588842
1 2 3 4 5
Weights 0.000594 -0.273319 0.594132 0.588867 -0.004569
1 2 3 4
Weights -0.121799 0.023751 0.167091 0.856263
1 2 3 4 5
Weights -0.110432 0.024552 0.165933 0.852082 0.003101
1 2 3 4
Weights -0.095559 -0.27724 0.229249 1.051423
1 2 3 4 5
Weights -0.094647 -0.277351 0.229062 1.051355 0.000519
1 2 3 4
Weights -0.124513 -0.065585 0.129546 0.959842
1 2 3 4 5
Weights -0.124155 -0.065608 0.129507 0.959817 0.000162
1 2 3 4
Weights -0.039269 -0.211149 0.208379 0.955195
1 2 3 4 5
Weights -0.018269 -0.211788 0.207181 0.954085 0.01868
1 2 3 4
Weights -0.15437 -0.128932 0.363735 0.810107
1 2 3 4 5
Weights -0.150678 -0.128475 0.36275 0.807714 0.001588
1 2 3 4
Weights -0.176825 0.003974 0.257393 0.82991
1 2 3 4 5
Weights -0.174908 0.007808 0.256503 0.806648 0.000889
1 2 3 4
Weights 0.245072 -0.111408 0.129108 0.683725
1 2 3 4 5
Weights 0.178133 -0.087899 0.128602 0.648303 -0.195988
1 2 3 4
Weights -0.053653 -0.156746 0.180478 0.906685
1 2 3 4 5
Weights -0.065968 -0.156202 0.181046 0.906895 -0.011585
1 2 3 4
Weights -0.063273 -0.211343 0.440294 0.752009
1 2 3 4 5
Weights -0.078432 -0.211418 0.441422 0.751666 -0.011723
1 2 3 4
Weights -0.040329 -0.014287 0.176309 0.811266
1 2 3 4 5
Weights 0.046 -0.015916 0.171847 0.797778 0.082509
1 2 3 4
Weights -0.049439 -0.244092 0.255596 0.968896
1 2 3 4 5
Weights -0.038993 -0.244601 0.254807 0.96782 0.010059
1 2 3 4
Weights -0.032799 -0.357149 0.425715 0.911441
1 2 3 4 5
Weights 0.001585 -0.356549 0.421867 0.906827 0.027583
1 2 3 4
Weights -0.195576 -0.099886 0.398822 0.794825
1 2 3 4 5
Weights -0.196164 -0.099906 0.398884 0.79487 -0.000072
1 2 3 4
Weights -0.037229 -0.371153 0.460452 0.836582
1 2 3 4 5
Weights -0.029413 -0.371445 0.460077 0.836311 0.007632
1 2 3 4
Weights -0.187042 -0.108681 0.179253 1.012636
1 2 3 4 5
Weights -0.186233 -0.107632 0.178904 1.009649 0.00016
1 2 3 4
Weights -0.068056 -0.07185 0.216421 0.842565
1 2 3 4 5
Weights -0.052855 -0.072064 0.215238 0.841179 0.012338
1 2 3 4
Weights 0.04445 0.063898 0.055683 0.763813
1 2 3 4 5
Weights -0.011113 0.068285 0.058273 0.76109 -0.076168
1 2 3 4
Weights -0.180262 -0.021293 0.266451 0.829585
1 2 3 4 5
Weights -0.181157 -0.021353 0.266583 0.829451 -0.000243
1 2 3 4
Weights -0.224734 -0.079961 0.329663 0.877935
1 2 3 4 5
Weights -0.22365 -0.079953 0.329431 0.8774 0.000023
1 2 3 4
Weights -0.02929 -0.015031 0.279392 0.715913
1 2 3 4 5
Weights 0.075497 -0.017521 0.272821 0.698632 0.106044
1 2 3 4
Weights -0.026334 -0.363024 0.227977 1.061936
1 2 3 4 5
Weights 0.001354 -0.363806 0.22717 1.059606 0.028516
1 2 3 4
Weights 0.002243 -0.027064 0.152631 0.79091
1 2 3 4 5
Weights -0.03192 -0.025882 0.152863 0.790484 -0.039443
1 2 3 4
Weights 0.057708 -0.333153 0.392261 0.839584
1 2 3 4 5
Weights 0.033865 -0.326294 0.379504 0.8306 -0.062398
1 2 3 4
Weights -0.049498 -0.313568 0.177703 1.071589
1 2 3 4 5
Weights -0.036908 -0.314413 0.176729 1.070865 0.011406
1 2 3 4
Weights -0.06031 -0.040706 0.315503 0.662942
1 2 3 4 5
Weights -0.026242 -0.041571 0.314079 0.660881 0.033787
1 2 3 4
Weights -0.07099 -0.313046 0.515653 0.799314
1 2 3 4 5
Weights -0.059597 -0.312501 0.513712 0.797826 0.007575
1 2 3 4
Weights -0.006761 0.031234 0.114174 0.710793
1 2 3 4 5
Weights 0.010327 0.030795 0.113889 0.710524 0.018469
1 2 3 4
Weights -0.00267 -0.395135 0.335507 0.973531
1 2 3 4 5
Weights 0.039802 -0.394752 0.336936 0.970018 0.049003
1 2 3 4
Weights -0.145198 -0.114883 0.197498 0.956512
1 2 3 4 5
Weights -0.143223 -0.114933 0.197204 0.955757 0.000829
1 2 3 4
Weights -0.029364 -0.206871 0.378952 0.747648
1 2 3 4 5
Weights -0.022916 -0.207078 0.378795 0.747469 0.006641
1 2 3 4
Weights -0.030138 -0.321158 0.504516 0.778814
1 2 3 4 5
Weights 0.006669 -0.321055 0.502022 0.775916 0.034007
1 2 3 4
Weights -0.131325 -0.153309 0.135789 1.04814
1 2 3 4 5
Weights -0.136505 -0.152381 0.137087 1.047219 -0.001545
1 2 3 4
Weights -0.17053 -0.024801 0.203526 0.921763
1 2 3 4 5
Weights -0.16287 -0.023101 0.201945 0.911396 0.00033
1 2 3 4
Weights -0.078537 -0.147354 0.07966 1.073456
1 2 3 4 5
Weights -0.081897 -0.146955 0.080126 1.073511 -0.001809
1 2 3 4
Weights -0.035369 -0.413796 0.355409 0.991736
1 2 3 4 5
Weights -0.023948 -0.414383 0.354643 0.991115 0.011087
1 2 3 4
Weights -0.072281 -0.208971 0.047599 1.136149
1 2 3 4 5
Weights -0.085404 -0.206915 0.049403 1.135457 -0.009695
1 2 3 4
Weights -0.144512 0.028178 0.14538 0.871143
1 2 3 4 5
Weights -0.145247 0.028162 0.145445 0.871193 -0.000277
1 2 3 4
Weights -0.142803 -0.1101 0.340716 0.799687
1 2 3 4 5
Weights -0.143114 -0.110103 0.340766 0.799704 -0.00015
1 2 3 4
Weights -0.124621 -0.258389 0.456455 0.807841
1 2 3 4 5
Weights -0.132439 -0.257889 0.457804 0.807222 -0.004588
1 2 3 4
Weights -0.124354 -0.224497 0.26651 0.990473
1 2 3 4 5
Weights -0.122033 -0.224533 0.265938 0.989665 0.000831
1 2 3 4
Weights -0.190709 -0.005547 0.131807 0.982056
1 2 3 4 5
Weights -0.19122 -0.006118 0.132197 0.981676 -0.000096
1 2 3 4
Weights -0.077748 0.023033 0.323605 0.705532
1 2 3 4 5
Weights -0.067681 0.025328 0.322996 0.703825 0.003049
1 2 3 4
Weights -0.035876 -0.18452 0.314834 0.865413
1 2 3 4 5
Weights 0.015925 -0.183297 0.310942 0.852344 0.046793
1 2 3 4
Weights -0.047262 -0.002229 0.226202 0.70838
1 2 3 4 5
Weights 0.016505 -0.004908 0.223292 0.701968 0.069354
1 2 3 4
Weights 0.06832 -0.209011 0.25416 0.776763
1 2 3 4 5
Weights 0.006662 -0.204369 0.249633 0.771815 -0.089891
1 2 3 4
Weights -0.113635 -0.131823 0.146742 0.998626
1 2 3 4 5
Weights -0.112496 -0.131922 0.14659 0.998517 0.000606
1 2 3 4
Weights -0.228127 -0.106717 0.231812 1.012897
1 2 3 4 5
Weights -0.228613 -0.106584 0.231973 1.01284 -0.000003
1 2 3 4
Weights -0.035312 -0.445716 0.497934 0.925608
1 2 3 4 5
Weights -0.010286 -0.445427 0.493993 0.922929 0.018391
1 2 3 4
Weights -0.142696 0.205387 0.273841 0.485771
1 2 3 4 5
Weights -0.115728 0.205948 0.273118 0.483768 0.02349
1 2 3 4
Weights -0.05705 -0.229491 0.407891 0.805639
1 2 3 4 5
Weights -0.023931 -0.229335 0.40463 0.801064 0.028222
1 2 3 4
Weights -0.19431 -0.187309 0.349044 0.965446
1 2 3 4 5
Weights -0.194258 -0.185735 0.349083 0.960841 -0.000056
1 2 3 4
Weights -0.118332 -0.061904 0.073043 1.051207
1 2 3 4 5
Weights -0.120212 -0.061567 0.073487 1.049934 -0.000855
1 2 3 4
Weights 0.02993 -0.395992 0.469445 0.844986
1 2 3 4 5
Weights 0.018155 -0.395159 0.467101 0.844638 -0.018099
1 2 3 4
Weights -0.106968 -0.078205 0.389156 0.667093
1 2 3 4 5
Weights -0.103187 -0.078206 0.388803 0.666874 0.00308
1 2 3 4
Weights -0.078667 0.038441 0.139088 0.796303
1 2 3 4 5
Weights -0.07348 0.038426 0.138891 0.796076 0.004286
1 2 3 4
Weights 0.056119 0.025977 0.18918 0.671473
1 2 3 4 5
Weights -0.017631 0.037335 0.190299 0.661289 -0.112714
1 2 3 4
Weights -0.046847 -0.174123 0.269078 0.860805
1 2 3 4 5
Weights -0.046997 -0.174117 0.269086 0.86081 -0.000144
1 2 3 4
Weights 0.052456 -0.173319 0.358314 0.687485
1 2 3 4 5
Weights -0.008446 -0.163688 0.351799 0.678623 -0.09401
1 2 3 4
Weights -0.157725 0.068374 0.119818 0.880075
1 2 3 4 5
Weights -0.147618 0.069166 0.118542 0.871151 0.001981
1 2 3 4
Weights -0.015844 0.003139 0.041231 0.858716
1 2 3 4 5
Weights 0.055613 0.0004 0.038651 0.852434 0.081772
1 2 3 4
Weights -0.011016 -0.191651 0.379528 0.746708
1 2 3 4 5
Weights 0.042579 -0.194059 0.379116 0.740314 0.064419
1 2 3 4
Weights -0.078963 -0.334799 0.335658 0.972491
1 2 3 4 5
Weights -0.093326 -0.332955 0.338061 0.971928 -0.010893
1 2 3 4
Weights -0.134622 -0.24039 0.262206 1.015514
1 2 3 4 5
Weights -0.133919 -0.238973 0.262828 1.010427 0.001269
1 2 3 4
Weights -0.001953 -0.096623 0.319345 0.715625
1 2 3 4 5
Weights -0.015952 -0.096038 0.319306 0.715617 -0.015869
1 2 3 4
Weights -0.157859 -0.320383 0.430045 0.96115
1 2 3 4 5
Weights -0.158864 -0.320197 0.430561 0.961009 -0.000041
Predicting t+3...
Fitting t+4...
1 2 3 4
Weights -0.255585 -0.01136 0.313564 0.818924
1 2 3 4 5
Weights -0.245741 -0.011589 0.313518 0.818823 0.010714
1 2 3 4
Weights -0.055682 0.053627 0.145081 0.731377
1 2 3 4 5
Weights -0.105961 0.055978 0.146018 0.730325 -0.05846
1 2 3 4
Weights -0.086472 -0.290653 0.199221 1.072153
1 2 3 4 5
Weights -0.133389 -0.290163 0.19945 1.072148 -0.050632
1 2 3 4
Weights -0.074532 -0.256497 0.560001 0.652402
1 2 3 4 5
Weights -0.026851 -0.258284 0.560781 0.650109 0.054463
1 2 3 4
Weights 0.04503 -0.041891 0.158835 0.788967
1 2 3 4 5
Weights 0.061436 -0.043034 0.158504 0.78781 0.020784
1 2 3 4
Weights -0.071104 -0.204754 0.120356 1.038711
1 2 3 4 5
Weights -0.029983 -0.204392 0.120757 1.036823 0.04534
1 2 3 4
Weights -0.08688 -0.142759 0.415225 0.736634
1 2 3 4 5
Weights -0.090747 -0.142531 0.415154 0.736635 -0.004649
1 2 3 4
Weights -0.185678 -0.066327 0.101592 1.049717
1 2 3 4 5
Weights -0.225683 -0.065694 0.101771 1.048769 -0.045736
1 2 3 4
Weights -0.129815 -0.097025 0.078189 1.056087
1 2 3 4 5
Weights -0.117302 -0.096949 0.078351 1.055669 0.015002
1 2 3 4
Weights -0.021637 -0.243946 -0.082888 1.25544
1 2 3 4 5
Weights 0.013813 -0.242242 -0.081896 1.252394 0.04061
1 2 3 4
Weights -0.158904 -0.222866 0.646848 0.591004
1 2 3 4 5
Weights -0.114827 -0.224826 0.647076 0.589775 0.049013
1 2 3 4
Weights -0.041966 -0.098744 0.246228 0.797511
1 2 3 4 5
Weights -0.092515 -0.094482 0.245522 0.793946 -0.066857
1 2 3 4
Weights 0.302188 -0.254555 0.42137 0.454084
1 2 3 4 5
Weights 0.196567 -0.246676 0.420668 0.453937 -0.118695
1 2 3 4
Weights -0.033364 -0.239709 0.502055 0.69789
1 2 3 4 5
Weights -0.072966 -0.237779 0.500905 0.697021 -0.04488
1 2 3 4
Weights 0.015932 -0.166423 0.240362 0.848702
1 2 3 4 5
Weights -0.004902 -0.165554 0.240136 0.848548 -0.024911
1 2 3 4
Weights -0.061307 -0.197824 0.167681 0.994309
1 2 3 4 5
Weights -0.081709 -0.197685 0.167456 0.994315 -0.02311
1 2 3 4
Weights -0.080071 -0.155193 0.237343 0.907938
1 2 3 4 5
Weights -0.065651 -0.155447 0.237578 0.907483 0.017203
1 2 3 4
Weights -0.054032 0.094842 0.238382 0.609204
1 2 3 4 5
Weights -0.048261 0.094509 0.238226 0.609135 0.006586
1 2 3 4
Weights -0.136526 -0.028739 0.178496 0.911827
1 2 3 4 5
Weights -0.240742 -0.013815 0.175313 0.86162 -0.163706
1 2 3 4
Weights 0.036465 -0.410082 0.254123 1.017688
1 2 3 4 5
Weights 0.095343 -0.407507 0.25803 1.008129 0.073889
1 2 3 4
Weights -0.138808 -0.140558 0.222161 0.9519
1 2 3 4 5
Weights -0.131643 -0.140678 0.22225 0.951743 0.008385
1 2 3 4
Weights -0.101423 -0.189763 0.264213 0.914141
1 2 3 4 5
Weights -0.091662 -0.189891 0.264323 0.913911 0.011082
1 2 3 4
Weights -0.069466 -0.255923 0.13 1.076936
1 2 3 4 5
Weights 0.034924 -0.252617 0.132567 1.066387 0.115106
1 2 3 4
Weights -0.185295 -0.016679 -0.074843 1.191167
1 2 3 4 5
Weights -0.17778 -0.016561 -0.074825 1.191035 0.00827
1 2 3 4
Weights 0.009591 -0.300259 0.464256 0.756829
1 2 3 4 5
Weights 0.011269 -0.300312 0.464285 0.756809 0.001866
1 2 3 4
Weights -0.004418 -0.162062 0.325373 0.773807
1 2 3 4 5
Weights -0.084504 -0.153524 0.32072 0.762055 -0.106866
1 2 3 4
Weights -0.253484 0.068496 0.305645 0.809106
1 2 3 4 5
Weights -0.273227 0.070445 0.304778 0.806654 -0.027989
1 2 3 4
Weights 0.051855 -0.234672 0.396711 0.712681
1 2 3 4 5
Weights 0.050655 -0.234576 0.396693 0.712718 -0.00149
1 2 3 4
Weights -0.06007 -0.049043 0.263158 0.724431
1 2 3 4 5
Weights -0.082561 -0.048082 0.2633 0.724377 -0.025622
1 2 3 4
Weights 0.121114 -0.398591 0.344617 0.847636
1 2 3 4 5
Weights 0.064764 -0.396156 0.342585 0.846682 -0.066078
1 2 3 4
Weights -0.085934 -0.380161 0.471132 0.929467
1 2 3 4 5
Weights -0.045063 -0.379734 0.472642 0.927474 0.045167
1 2 3 4
Weights -0.022663 -0.364238 0.241989 1.040159
1 2 3 4 5
Weights 0.033765 -0.362553 0.244062 1.034602 0.065332
1 2 3 4
Weights -0.100184 -0.107912 0.18664 0.925849
1 2 3 4 5
Weights -0.145633 -0.106522 0.186249 0.923921 -0.054079
1 2 3 4
Weights -0.05212 -0.216472 0.344732 0.843072
1 2 3 4 5
Weights 0.022829 -0.216557 0.345303 0.840909 0.079224
1 2 3 4
Weights 0.170159 0.113226 -0.024498 0.634672
1 2 3 4 5
Weights 0.042389 0.116738 -0.021037 0.632042 -0.140791
1 2 3 4
Weights -0.030164 -0.123085 0.301298 0.791196
1 2 3 4 5
Weights -0.019364 -0.123604 0.301205 0.790697 0.013851
1 2 3 4
Weights -0.051505 0.010284 0.142652 0.797204
1 2 3 4 5
Weights -0.102452 0.012407 0.143187 0.795603 -0.059837
1 2 3 4
Weights -0.095866 -0.111411 0.232374 0.843406
1 2 3 4 5
Weights -0.006705 -0.112596 0.233083 0.832491 0.108375
1 2 3 4
Weights 0.039805 -0.386263 0.324459 0.959719
1 2 3 4 5
Weights 0.064371 -0.38602 0.326494 0.956362 0.033195
1 2 3 4
Weights -0.035947 -0.371006 0.345183 0.990275
1 2 3 4 5
Weights -0.011076 -0.370749 0.346238 0.989061 0.028672
1 2 3 4
Weights -0.000588 0.001096 0.316104 0.615533
1 2 3 4 5
Weights 0.017925 -0.000201 0.315727 0.614983 0.021698
1 2 3 4
Weights 0.117621 0.113051 0.004942 0.647582
1 2 3 4 5
Weights 0.162543 0.111676 0.003665 0.64647 0.048515
1 2 3 4
Weights -0.139885 0.094827 0.219763 0.740501
1 2 3 4 5
Weights -0.206698 0.09902 0.2197 0.734459 -0.0811
1 2 3 4
Weights -0.150142 -0.012396 0.204563 0.840102
1 2 3 4 5
Weights -0.237417 -0.009556 0.204432 0.836009 -0.097851
1 2 3 4
Weights -0.131747 -0.110322 0.28954 0.867265
1 2 3 4 5
Weights -0.144976 -0.109821 0.289286 0.867032 -0.015962
1 2 3 4
Weights 0.055366 -0.559806 0.343571 1.066679
1 2 3 4 5
Weights 0.125233 -0.55328 0.350214 1.05196 0.087032
1 2 3 4
Weights -0.04913 -0.06709 0.20988 0.805025
1 2 3 4 5
Weights -0.058407 -0.066752 0.209898 0.805058 -0.010846
1 2 3 4
Weights -0.064604 -0.19958 0.154244 1.043617
1 2 3 4 5
Weights -0.053953 -0.199461 0.154777 1.04307 0.014083
1 2 3 4
Weights -0.172486 -0.207997 0.384411 0.899286
1 2 3 4 5
Weights -0.181275 -0.207746 0.384225 0.899289 -0.0102
1 2 3 4
Weights 0.280663 0.076554 0.075619 0.503552
1 2 3 4 5
Weights 0.103983 0.084257 0.080257 0.499471 -0.197913
1 2 3 4
Weights -0.023608 -0.217333 0.123052 1.039497
1 2 3 4 5
Weights -0.027874 -0.217343 0.122999 1.039538 -0.004722
1 2 3 4
Weights -0.060199 -0.109318 0.389158 0.671495
1 2 3 4 5
Weights -0.024058 -0.111008 0.389053 0.67032 0.041417
1 2 3 4
Weights -0.103878 -0.049245 0.226935 0.853896
1 2 3 4 5
Weights -0.220117 -0.034485 0.219563 0.810334 -0.16911
1 2 3 4
Weights 0.064955 -0.32729 0.273072 0.924667
1 2 3 4 5
Weights 0.135535 -0.32714 0.274298 0.91846 0.07938
1 2 3 4
Weights -0.217135 0.108121 0.38163 0.611587
1 2 3 4 5
Weights -0.365904 0.120281 0.378413 0.593103 -0.175849
1 2 3 4
Weights 0.046668 0.254702 -0.008694 0.567613
1 2 3 4 5
Weights -0.245544 0.258009 -0.006667 0.552985 -0.311905
1 2 3 4
Weights -0.144928 -0.148965 0.15578 1.064264
1 2 3 4 5
Weights -0.158491 -0.148919 0.155262 1.063569 -0.01891
1 2 3 4
Weights 0.102255 -0.094667 0.168106 0.722174
1 2 3 4 5
Weights 0.07212 -0.093679 0.168423 0.722402 -0.03354
1 2 3 4
Weights 0.007507 -0.309365 0.068561 1.122165
1 2 3 4 5
Weights 0.036469 -0.308929 0.068938 1.12103 0.031375
1 2 3 4
Weights -0.056488 -0.093323 0.301315 0.759553
1 2 3 4 5
Weights -0.031957 -0.093971 0.30134 0.759058 0.027214
1 2 3 4
Weights -0.088228 -0.108627 0.095778 1.018565
1 2 3 4 5
Weights -0.139577 -0.107478 0.095387 1.014303 -0.065161
1 2 3 4
Weights 0.118567 0.115323 0.059806 0.608699
1 2 3 4 5
Weights 0.056567 0.117383 0.061347 0.608597 -0.067959
1 2 3 4
Weights 0.202978 -0.116824 0.268232 0.583766
1 2 3 4 5
Weights 0.13585 -0.112558 0.268816 0.583802 -0.077192
1 2 3 4
Weights -0.207456 0.030362 0.349792 0.726016
1 2 3 4 5
Weights -0.331065 0.041997 0.346265 0.70549 -0.152827
1 2 3 4
Weights 0.107526 0.034438 -0.100424 0.865525
1 2 3 4 5
Weights 0.195384 0.033375 -0.101753 0.862663 0.093552
1 2 3 4
Weights -0.06008 -0.23499 0.064896 1.127952
1 2 3 4 5
Weights -0.022391 -0.234172 0.065566 1.125764 0.042081
1 2 3 4
Weights -0.214086 -0.064483 0.353301 0.786902
1 2 3 4 5
Weights -0.172109 -0.065281 0.353162 0.785926 0.045596
1 2 3 4
Weights -0.11463 -0.124054 -0.082009 1.240807
1 2 3 4 5
Weights -0.096399 -0.123365 -0.081652 1.239887 0.020966
1 2 3 4
Weights 0.051376 0.014064 0.042212 0.812644
1 2 3 4 5
Weights -0.022008 0.016076 0.043406 0.810067 -0.085276
1 2 3 4
Weights -0.014391 -0.274008 0.012119 1.158152
1 2 3 4 5
Weights 0.085992 -0.270233 0.015184 1.148635 0.110992
1 2 3 4
Weights -0.148375 0.06181 0.358565 0.615264
1 2 3 4 5
Weights -0.175006 0.063394 0.35873 0.614733 -0.030448
1 2 3 4
Weights -0.194388 -0.182617 0.506295 0.773552
1 2 3 4 5
Weights -0.217557 -0.181673 0.505807 0.773076 -0.026025
1 2 3 4
Weights -0.099093 -0.024259 -0.100024 1.13517
1 2 3 4 5
Weights -0.092965 -0.024162 -0.100021 1.135057 0.007064
1 2 3 4
Weights -0.140298 -0.083588 0.348142 0.721826
1 2 3 4 5
Weights -0.314346 -0.08164 0.348187 0.719904 -0.178201
1 2 3 4
Weights 0.000159 0.000057 0.321995 0.602528
1 2 3 4 5
Weights 0.055006 -0.004349 0.319536 0.598551 0.064857
1 2 3 4
Weights 0.313583 0.032354 0.036196 0.393361
1 2 3 4 5
Weights 0.337037 0.031329 0.035394 0.392724 0.025115
1 2 3 4
Weights -0.033068 -0.340202 0.600944 0.706937
1 2 3 4 5
Weights -0.029164 -0.340349 0.601069 0.706891 0.004405
1 2 3 4
Weights -0.231421 -0.092669 0.371942 0.87992
1 2 3 4 5
Weights -0.360284 -0.068268 0.362691 0.824954 -0.181421
1 2 3 4
Weights -0.170143 -0.145339 0.233367 0.976749
1 2 3 4 5
Weights -0.202021 -0.144591 0.233307 0.976354 -0.036263
1 2 3 4
Weights -0.010278 -0.010987 0.273435 0.612782
1 2 3 4 5
Weights -0.042433 -0.009828 0.273713 0.612781 -0.035083
1 2 3 4
Weights -0.156512 -0.09238 0.328982 0.820704
1 2 3 4 5
Weights -0.159526 -0.092238 0.32893 0.820688 -0.003652
1 2 3 4
Weights 0.004969 -0.458166 0.406672 0.938217
1 2 3 4 5
Weights 0.056862 -0.457487 0.408417 0.934508 0.058522
1 2 3 4
Weights -0.08685 -0.041725 -0.073327 1.100163
1 2 3 4 5
Weights -0.044196 -0.04078 -0.072852 1.097228 0.051103
1 2 3 4
Weights 0.082665 -0.067993 -0.019753 0.882192
1 2 3 4 5
Weights -0.133772 -0.065803 -0.01815 0.862098 -0.243637
1 2 3 4
Weights -0.173816 -0.125638 0.145103 1.053924
1 2 3 4 5
Weights -0.203465 -0.125262 0.145083 1.053519 -0.033732
1 2 3 4
Weights -0.034862 -0.347491 0.267008 0.990461
1 2 3 4 5
Weights 0.051814 -0.346012 0.268639 0.98438 0.09464
1 2 3 4
Weights -0.202948 -0.05138 0.256593 0.906465
1 2 3 4 5
Weights -0.272818 -0.045862 0.25471 0.896118 -0.089556
1 2 3 4
Weights -0.111722 -0.078554 0.276436 0.806268
1 2 3 4 5
Weights -0.095396 -0.079017 0.27646 0.806018 0.018416
1 2 3 4
Weights -0.127425 -0.2547 0.324727 0.949023
1 2 3 4 5
Weights -0.110424 -0.254834 0.324996 0.948427 0.019177
1 2 3 4
Weights -0.036441 -0.147237 0.376229 0.700271
1 2 3 4 5
Weights -0.023846 -0.147684 0.376263 0.70007 0.014039
1 2 3 4
Weights 0.001857 -0.369751 0.133213 1.143233
1 2 3 4 5
Weights 0.019716 -0.368959 0.13396 1.141673 0.021421
1 2 3 4
Weights 0.010257 -0.499282 0.366416 1.01717
1 2 3 4 5
Weights 0.08355 -0.496049 0.369822 1.008163 0.084153
1 2 3 4
Weights -0.174195 -0.109095 0.2964 0.859678
1 2 3 4 5
Weights -0.190442 -0.108721 0.296403 0.85974 -0.018174
1 2 3 4
Weights -0.05009 -0.178747 0.314264 0.865257
1 2 3 4 5
Weights -0.006298 -0.179505 0.314793 0.863402 0.04929
1 2 3 4
Weights -0.068862 -0.284481 0.388201 0.842867
1 2 3 4 5
Weights -0.019107 -0.284935 0.389026 0.840379 0.055857
1 2 3 4
Weights -0.047376 -0.222041 0.304954 0.869161
1 2 3 4 5
Weights -0.027892 -0.22247 0.305375 0.868358 0.023091
1 2 3 4
Weights -0.087775 -0.256047 0.165117 1.068152
1 2 3 4 5
Weights -0.013063 -0.253 0.167922 1.058014 0.087948
1 2 3 4
Weights -0.146824 -0.275863 0.269877 1.048894
1 2 3 4 5
Weights -0.150829 -0.275843 0.269841 1.049009 -0.004462
1 2 3 4
Weights -0.060379 -0.281576 0.46896 0.789163
1 2 3 4 5
Weights -0.062347 -0.281492 0.468904 0.78918 -0.002277
1 2 3 4
Weights -0.027073 -0.477138 0.552234 0.857717
1 2 3 4 5
Weights -0.005083 -0.477355 0.552987 0.85688 0.024632
1 2 3 4
Weights -0.031205 -0.31744 0.479283 0.786598
1 2 3 4 5
Weights -0.008935 -0.318123 0.479858 0.785793 0.025648
1 2 3 4
Weights 0.019677 -0.192076 0.465378 0.644047
1 2 3 4 5
Weights 0.050031 -0.194685 0.465091 0.641592 0.037483
1 2 3 4
Weights -0.175035 -0.047698 0.165238 0.949378
1 2 3 4 5
Weights -0.162079 -0.047815 0.165229 0.94918 0.014467
1 2 3 4
Weights -0.013304 -0.260061 0.275381 0.953007
1 2 3 4 5
Weights 0.049287 -0.259745 0.276116 0.946435 0.071637
1 2 3 4
Weights -0.060145 -0.017365 0.121062 0.834593
1 2 3 4 5
Weights -0.006704 -0.019501 0.119887 0.83037 0.065464
1 2 3 4
Weights -0.11589 -0.02772 0.070577 0.980401
1 2 3 4 5
Weights -0.091373 -0.027857 0.070574 0.979571 0.028798
1 2 3 4
Weights -0.137855 0.019047 0.257351 0.773571
1 2 3 4 5
Weights -0.159431 0.020313 0.256955 0.772563 -0.026703
1 2 3 4
Weights -0.14242 -0.224934 0.231638 1.035725
1 2 3 4 5
Weights -0.223703 -0.22248 0.232309 1.032441 -0.089925
1 2 3 4
Weights -0.096865 -0.089163 0.420466 0.653252
1 2 3 4 5
Weights -0.072506 -0.090379 0.420432 0.652742 0.027697
1 2 3 4
Weights -0.216429 -0.066451 0.32581 0.825182
1 2 3 4 5
Weights -0.168817 -0.067275 0.32557 0.823313 0.05282
1 2 3 4
Weights -0.127089 0.076313 0.103513 0.835885
1 2 3 4 5
Weights -0.181537 0.077926 0.104143 0.834134 -0.062417
1 2 3 4
Weights -0.011454 -0.396302 0.542473 0.810253
1 2 3 4 5
Weights 0.032561 -0.396478 0.543716 0.808106 0.048767
1 2 3 4
Weights -0.02051 -0.120254 0.443025 0.604001
1 2 3 4 5
Weights -0.010473 -0.120959 0.443015 0.603791 0.01179
1 2 3 4
Weights -0.0634 -0.119702 0.168568 0.880941
1 2 3 4 5
Weights -0.022243 -0.120197 0.168561 0.879727 0.045156
1 2 3 4
Weights -0.207299 0.008564 0.188248 0.930583
1 2 3 4 5
Weights -0.274393 0.012866 0.187402 0.918658 -0.087707
1 2 3 4
Weights -0.08164 -0.116078 0.343197 0.786344
1 2 3 4 5
Weights -0.037507 -0.116794 0.343428 0.785317 0.047979
1 2 3 4
Weights -0.075242 -0.131763 0.221482 0.890717
1 2 3 4 5
Weights -0.131721 -0.129046 0.220406 0.886867 -0.069559
1 2 3 4
Weights -0.068986 -0.028425 0.320641 0.627344
1 2 3 4 5
Weights -0.129079 -0.026434 0.32081 0.626698 -0.064978
1 2 3 4
Weights -0.060834 -0.215924 0.224918 0.916893
1 2 3 4 5
Weights 0.003364 -0.216006 0.225483 0.913975 0.070209
1 2 3 4
Weights 0.091637 -0.310991 0.271896 0.852402
1 2 3 4 5
Weights 0.051967 -0.31003 0.271341 0.852394 -0.044467
1 2 3 4
Weights -0.210401 0.005046 0.215832 0.866152
1 2 3 4 5
Weights -0.181874 0.00449 0.215757 0.865541 0.03226
1 2 3 4
Weights -0.017376 -0.19665 0.259883 0.842372
1 2 3 4 5
Weights -0.027144 -0.196411 0.259799 0.842468 -0.011027
1 2 3 4
Weights -0.054596 -0.365346 0.436941 0.896725
1 2 3 4 5
Weights -0.013722 -0.365222 0.438344 0.894105 0.046656
1 2 3 4
Weights 0.080526 0.15308 0.030096 0.690539
1 2 3 4 5
Weights -0.054137 0.158233 0.033897 0.681653 -0.158068
1 2 3 4
Weights -0.114153 -0.132032 0.283141 0.876998
1 2 3 4 5
Weights -0.128513 -0.131455 0.282822 0.876722 -0.017634
1 2 3 4
Weights -0.054835 -0.059695 0.227531 0.839687
1 2 3 4 5
Weights -0.004208 -0.061197 0.227448 0.836123 0.05869
1 2 3 4
Weights -0.07124 -0.243637 0.520031 0.724902
1 2 3 4 5
Weights -0.039881 -0.244441 0.520603 0.724149 0.034831
1 2 3 4
Weights -0.054432 -0.368236 0.621986 0.736435
1 2 3 4 5
Weights -0.024624 -0.368973 0.622781 0.735413 0.033296
1 2 3 4
Weights -0.018126 -0.327098 0.336516 0.909628
1 2 3 4 5
Weights -0.010463 -0.327193 0.33671 0.909388 0.008821
1 2 3 4
Weights 0.193591 0.182285 0.049013 0.493012
1 2 3 4 5
Weights 0.093004 0.185687 0.05167 0.492684 -0.108569
1 2 3 4
Weights -0.144758 -0.147211 0.23333 0.955593
1 2 3 4 5
Weights -0.190911 -0.145869 0.233033 0.954101 -0.053443
1 2 3 4
Weights -0.067341 -0.261541 0.271173 0.983236
1 2 3 4 5
Weights -0.123003 -0.259077 0.269393 0.978369 -0.067628
1 2 3 4
Weights -0.16143 0.020023 0.100289 0.952162
1 2 3 4 5
Weights -0.215677 0.021918 0.100729 0.946815 -0.068361
1 2 3 4
Weights 0.097577 -0.406603 0.420521 0.816355
1 2 3 4 5
Weights 0.125344 -0.407596 0.421673 0.813894 0.034541
1 2 3 4
Weights -0.129531 -0.047099 0.209134 0.929257
1 2 3 4 5
Weights -0.169429 -0.042383 0.206248 0.916507 -0.065375
1 2 3 4
Weights -0.103135 -0.349689 0.353397 0.982907
1 2 3 4 5
Weights -0.034749 -0.347944 0.355022 0.975368 0.077643
1 2 3 4
Weights -0.024491 -0.172015 0.233097 0.917904
1 2 3 4 5
Weights 0.013959 -0.172519 0.233797 0.915391 0.045376
1 2 3 4
Weights 0.024492 -0.06428 0.278425 0.669044
1 2 3 4 5
Weights 0.075505 -0.068236 0.276783 0.66517 0.061464
1 2 3 4
Weights 0.015033 -0.168414 0.214371 0.868737
1 2 3 4 5
Weights 0.036508 -0.168723 0.214529 0.867883 0.024886
1 2 3 4
Weights -0.118449 -0.236705 0.163722 1.085047
1 2 3 4 5
Weights -0.093973 -0.236371 0.163976 1.083574 0.027682
1 2 3 4
Weights -0.13618 -0.043511 0.238105 0.868153
1 2 3 4 5
Weights -0.214063 -0.036791 0.234906 0.852436 -0.105426
1 2 3 4
Weights -0.123087 -0.172625 0.298519 0.880914
1 2 3 4 5
Weights -0.112201 -0.172805 0.298614 0.880661 0.012308
1 2 3 4
Weights -0.135197 -0.166029 0.241105 0.954836
1 2 3 4 5
Weights -0.080511 -0.165621 0.242176 0.949984 0.063817
1 2 3 4
Weights 0.005223 -0.168892 0.367337 0.683269
1 2 3 4 5
Weights 0.057027 -0.170973 0.367092 0.680517 0.059138
1 2 3 4
Weights -0.093757 -0.061781 0.219476 0.841735
1 2 3 4 5
Weights -0.244783 -0.04969 0.215211 0.808733 -0.190642
1 2 3 4
Weights -0.0473 -0.257459 0.333606 0.907321
1 2 3 4 5
Weights -0.044572 -0.257516 0.333715 0.90727 0.003306
1 2 3 4
Weights -0.18871 0.051217 -0.058647 1.107192
1 2 3 4 5
Weights -0.258247 0.051751 -0.056572 1.098601 -0.085925
1 2 3 4
Weights -0.012666 -0.372996 0.494787 0.807934
1 2 3 4 5
Weights 0.001816 -0.37337 0.495246 0.807517 0.016552
1 2 3 4
Weights -0.198087 -0.100607 0.285524 0.922427
1 2 3 4 5
Weights -0.316581 -0.090311 0.282945 0.899568 -0.146138
1 2 3 4
Weights -0.037168 -0.240259 0.19016 0.998595
1 2 3 4 5
Weights -0.038745 -0.24025 0.190126 0.998626 -0.001855
1 2 3 4
Weights 0.10371 -0.262809 0.371145 0.678856
1 2 3 4 5
Weights 0.071093 -0.261343 0.370907 0.679141 -0.036737
1 2 3 4
Weights -0.15825 -0.076183 0.298341 0.855888
1 2 3 4 5
Weights -0.290576 -0.055278 0.288644 0.802364 -0.189966
1 2 3 4
Weights -0.17039 -0.017288 0.305664 0.785368
1 2 3 4 5
Weights -0.349676 -0.003699 0.299648 0.752393 -0.214096
1 2 3 4
Weights -0.032295 -0.09653 0.340541 0.677644
1 2 3 4 5
Weights 0.043027 -0.099954 0.339448 0.672197 0.087863
1 2 3 4
Weights -0.012808 -0.052476 0.28308 0.717141
1 2 3 4 5
Weights 0.004293 -0.053268 0.282952 0.716844 0.019637
1 2 3 4
Weights -0.128861 -0.215211 0.118351 1.113698
1 2 3 4 5
Weights -0.079085 -0.214031 0.119056 1.109802 0.05518
1 2 3 4
Weights -0.098565 -0.06401 0.12425 0.919392
1 2 3 4 5
Weights -0.057906 -0.064397 0.12424 0.917945 0.04563
1 2 3 4
Weights -0.14249 0.102455 0.066075 0.912533
1 2 3 4 5
Weights -0.209789 0.10614 0.066728 0.89856 -0.094264
1 2 3 4
Weights 0.065655 -0.285342 0.304309 0.865765
1 2 3 4 5
Weights 0.098434 -0.285971 0.304825 0.863844 0.037766
1 2 3 4
Weights -0.073317 -0.280684 0.32631 0.940181
1 2 3 4 5
Weights -0.045322 -0.280782 0.327432 0.938157 0.034653
1 2 3 4
Weights -0.142418 0.038219 0.085639 0.943043
1 2 3 4 5
Weights -0.170435 0.039339 0.085817 0.940313 -0.03869
1 2 3 4
Weights -0.207308 -0.041345 0.335196 0.814874
1 2 3 4 5
Weights -0.215715 -0.040733 0.335034 0.814635 -0.010809
1 2 3 4
Weights -0.028374 -0.226345 0.491865 0.692961
1 2 3 4 5
Weights 0.032024 -0.227468 0.492251 0.691142 0.065263
1 2 3 4
Weights -0.080706 -0.158375 0.200527 0.960518
1 2 3 4 5
Weights -0.018809 -0.158073 0.201311 0.956851 0.068849
1 2 3 4
Weights -0.016394 -0.124353 0.319456 0.769229
1 2 3 4 5
Weights -0.046534 -0.121678 0.318567 0.76754 -0.040459
1 2 3 4
Weights -0.133893 0.020416 0.360506 0.653255
1 2 3 4 5
Weights -0.235178 0.033338 0.35644 0.636006 -0.133121
1 2 3 4
Weights -0.140065 -0.140432 0.265255 0.950562
1 2 3 4 5
Weights -0.096834 -0.139755 0.266857 0.946471 0.052656
1 2 3 4
Weights -0.104139 -0.107252 0.229987 0.880197
1 2 3 4 5
Weights -0.056889 -0.108439 0.230446 0.875961 0.059373
1 2 3 4
Weights -0.195781 -0.069816 0.216669 0.94942
1 2 3 4 5
Weights -0.204617 -0.069502 0.216615 0.949336 -0.010983
1 2 3 4
Weights -0.214544 -0.128757 0.256784 0.997251
1 2 3 4 5
Weights -0.225625 -0.128532 0.256569 0.997102 -0.013359
1 2 3 4
Weights -0.16037 -0.11492 0.153685 1.023577
1 2 3 4 5
Weights -0.15495 -0.114954 0.153738 1.023437 0.006548
1 2 3 4
Weights -0.221777 -0.163819 0.326647 0.960096
1 2 3 4 5
Weights -0.299189 -0.15905 0.326132 0.953671 -0.090504
1 2 3 4
Weights -0.089778 -0.135533 0.201065 0.905062
1 2 3 4 5
Weights -0.065151 -0.135804 0.201164 0.904406 0.027504
1 2 3 4
Weights -0.125584 -0.181293 0.013866 1.213356
1 2 3 4 5
Weights -0.121351 -0.181185 0.013924 1.213214 0.004813
1 2 3 4
Weights -0.019365 -0.11979 0.245406 0.80118
1 2 3 4 5
Weights 0.039337 -0.120677 0.245253 0.797821 0.067315
1 2 3 4
Weights -0.163535 -0.143294 0.282095 0.937828
1 2 3 4 5
Weights -0.149235 -0.143514 0.282475 0.937337 0.017737
1 2 3 4
Weights -0.0407 -0.240993 0.326819 0.900722
1 2 3 4 5
Weights -0.046244 -0.240846 0.32665 0.900733 -0.006581
1 2 3 4
Weights -0.202487 -0.193891 0.381398 0.901241
1 2 3 4 5
Weights -0.216527 -0.193596 0.381351 0.901339 -0.015378
1 2 3 4
Weights -0.062713 0.278859 -0.538007 1.160583
1 2 3 4 5
Weights 0.020282 0.281502 -0.539873 1.158244 0.092844
1 2 3 4
Weights -0.208089 -0.135435 0.45353 0.748619
1 2 3 4 5
Weights -0.132128 -0.1363 0.453455 0.745663 0.082175
1 2 3 4
Weights 0.099728 -0.37421 0.63946 0.561694
1 2 3 4 5
Weights 0.044523 -0.371015 0.637804 0.561137 -0.061165
1 2 3 4
Weights -0.147911 -0.223315 0.328781 0.924289
1 2 3 4 5
Weights -0.100439 -0.223332 0.329556 0.921261 0.05386
1 2 3 4
Weights -0.064857 -0.057518 0.321597 0.737255
1 2 3 4 5
Weights -0.025217 -0.059121 0.321305 0.735595 0.045471
1 2 3 4
Weights 0.16209 0.031063 -0.004958 0.53419
1 2 3 4 5
Weights 0.45254 0.02635 -0.009897 0.526468 0.307502
1 2 3 4
Weights -0.007037 -0.284018 0.384895 0.79564
1 2 3 4 5
Weights -0.011599 -0.283889 0.384828 0.795707 -0.005136
1 2 3 4
Weights -0.048177 -0.322875 0.104892 1.212352
1 2 3 4 5
Weights -0.017188 -0.321832 0.105991 1.210958 0.033674
1 2 3 4
Weights -0.107267 -0.119918 0.258366 0.878488
1 2 3 4 5
Weights -0.115383 -0.119552 0.258202 0.878442 -0.010222
1 2 3 4
Weights -0.037997 -0.35202 0.466327 0.823829
1 2 3 4 5
Weights 0.002386 -0.352403 0.467177 0.822323 0.044628
1 2 3 4
Weights -0.030031 -0.004559 0.05903 0.874914
1 2 3 4 5
Weights -0.084985 -0.003239 0.059659 0.873481 -0.063188
1 2 3 4
Weights -0.228435 -0.063043 0.233921 0.967644
1 2 3 4 5
Weights -0.240069 -0.062707 0.233711 0.967324 -0.01422
1 2 3 4
Weights -0.117999 -0.124718 0.246435 0.950663
1 2 3 4 5
Weights -0.13654 -0.123405 0.244886 0.948148 -0.029251
1 2 3 4
Weights -0.093605 -0.006906 0.102649 0.876906
1 2 3 4 5
Weights -0.08211 -0.007366 0.102448 0.876536 0.014665
1 2 3 4
Weights -0.129545 -0.023044 0.18386 0.844481
1 2 3 4 5
Weights -0.074554 -0.024178 0.183379 0.841762 0.063211
1 2 3 4
Weights -0.092807 -0.131776 0.282139 0.89545
1 2 3 4 5
Weights -0.067599 -0.132208 0.283133 0.893529 0.033335
1 2 3 4
Weights -0.009498 -0.311705 0.412706 0.824386
1 2 3 4 5
Weights 0.023009 -0.312523 0.413387 0.822497 0.037942
1 2 3 4
Weights -0.128412 -0.118041 0.323202 0.825309
1 2 3 4 5
Weights -0.192349 -0.113523 0.321448 0.819754 -0.079399
1 2 3 4
Weights -0.161916 -0.163053 0.330386 0.87085
1 2 3 4 5
Weights -0.076865 -0.163057 0.331148 0.864112 0.0959
1 2 3 4
Weights 0.010511 0.052463 0.053912 0.704616
1 2 3 4 5
Weights 0.079783 0.049108 0.051513 0.701157 0.081748
1 2 3 4
Weights -0.126994 -0.19504 0.219551 1.019476
1 2 3 4 5
Weights -0.130133 -0.195016 0.219389 1.01952 -0.004214
1 2 3 4
Weights -0.115298 -0.20497 0.425823 0.816829
1 2 3 4 5
Weights -0.079599 -0.20583 0.426806 0.814677 0.042952
1 2 3 4
Weights 0.002651 -0.348249 0.358977 0.939376
1 2 3 4 5
Weights 0.051236 -0.348116 0.36034 0.936315 0.054582
1 2 3 4
Weights -0.108273 -0.129958 0.033109 1.106449
1 2 3 4 5
Weights -0.108121 -0.129956 0.033111 1.106446 0.00018
1 2 3 4
Weights -0.044944 -0.26287 0.476066 0.758691
1 2 3 4 5
Weights -0.012914 -0.26344 0.476558 0.757967 0.035035
1 2 3 4
Weights -0.013013 -0.317016 0.426181 0.842591
1 2 3 4 5
Weights -0.033298 -0.316581 0.425661 0.842414 -0.022348
1 2 3 4
Weights -0.159968 -0.172638 0.334498 0.881469
1 2 3 4 5
Weights -0.127462 -0.173222 0.335003 0.879927 0.038133
1 2 3 4
Weights -0.037341 0.013818 0.223824 0.635338
1 2 3 4 5
Weights -0.165033 0.018056 0.223773 0.631167 -0.139383
1 2 3 4
Weights 0.100711 -0.191716 0.329805 0.692442
1 2 3 4 5
Weights 0.078406 -0.190505 0.32973 0.69264 -0.025814
1 2 3 4
Weights -0.208243 -0.148594 0.549474 0.67443
1 2 3 4 5
Weights -0.175555 -0.150149 0.549611 0.673545 0.037217
1 2 3 4
Weights -0.141898 -0.110924 0.232945 0.927262
1 2 3 4 5
Weights -0.208889 -0.10781 0.231841 0.921899 -0.080434
1 2 3 4
Weights 0.381675 0.062528 0.063654 0.421444
1 2 3 4 5
Weights 0.257308 0.071154 0.069468 0.421242 -0.142503
1 2 3 4
Weights 0.029428 -0.45095 0.512332 0.853102
1 2 3 4 5
Weights 0.080316 -0.450917 0.513456 0.851098 0.05492
1 2 3 4
Weights -0.032708 -0.262768 0.405787 0.806008
1 2 3 4 5
Weights -0.050689 -0.262209 0.405414 0.80597 -0.020312
1 2 3 4
Weights -0.083386 -0.227836 0.200279 1.03655
1 2 3 4 5
Weights -0.070486 -0.227764 0.200644 1.036163 0.015002
1 2 3 4
Weights 0.015592 -0.129297 0.162262 0.860778
1 2 3 4 5
Weights 0.02982 -0.129769 0.162278 0.860173 0.017339
1 2 3 4
Weights 0.025628 -0.33195 0.415192 0.805635
1 2 3 4 5
Weights 0.023659 -0.331838 0.415102 0.805698 -0.002504
1 2 3 4
Weights 0.053593 0.12952 0.066834 0.606904
1 2 3 4 5
Weights 0.004815 0.130793 0.067753 0.606936 -0.052195
1 2 3 4
Weights -0.035522 -0.112468 0.319545 0.772909
1 2 3 4 5
Weights 0.006078 -0.114231 0.319958 0.770183 0.048925
1 2 3 4
Weights -0.090998 -0.277814 0.439076 0.858399
1 2 3 4 5
Weights -0.091142 -0.277812 0.439071 0.858399 -0.000162
1 2 3 4
Weights -0.178236 -0.155317 0.165772 1.076691
1 2 3 4 5
Weights -0.182995 -0.155316 0.165714 1.076752 -0.005652
1 2 3 4
Weights -0.03619 -0.406361 0.481499 0.863265
1 2 3 4 5
Weights -0.065411 -0.405128 0.480321 0.863009 -0.03415
1 2 3 4
Weights -0.133475 -0.06212 0.189467 0.9403
1 2 3 4 5
Weights -0.092659 -0.062172 0.190145 0.938632 0.046934
1 2 3 4
Weights -0.172166 -0.05671 0.170552 0.962851
1 2 3 4 5
Weights -0.313924 -0.049361 0.170535 0.941426 -0.167721
1 2 3 4
Weights -0.00556 -0.395592 0.505087 0.830839
1 2 3 4 5
Weights 0.079305 -0.395139 0.506518 0.825822 0.091549
1 2 3 4
Weights -0.000324 0.01582 0.15117 0.694887
1 2 3 4 5
Weights 0.108811 0.010565 0.147194 0.686304 0.124837
1 2 3 4
Weights 0.128607 -0.114295 0.245675 0.676194
1 2 3 4 5
Weights 0.092642 -0.112689 0.245977 0.676402 -0.04075
1 2 3 4
Weights -0.137674 -0.072332 0.140423 0.960372
1 2 3 4 5
Weights -0.153416 -0.072091 0.14037 0.960353 -0.018108
1 2 3 4
Weights 0.024927 -0.328772 0.376807 0.868938
1 2 3 4 5
Weights 0.086544 -0.328825 0.377593 0.866237 0.066469
1 2 3 4
Weights -0.185035 -0.036549 0.217049 0.916408
1 2 3 4 5
Weights -0.248732 -0.03354 0.216046 0.910053 -0.07791
1 2 3 4
Weights -0.243279 -0.045304 0.310472 0.889702
1 2 3 4 5
Weights -0.264524 -0.04474 0.310226 0.889172 -0.023953
1 2 3 4
Weights -0.022376 -0.132982 0.280192 0.818175
1 2 3 4 5
Weights -0.082702 -0.129174 0.278421 0.813411 -0.074992
1 2 3 4
Weights -0.182001 -0.180653 0.320769 0.925469
1 2 3 4 5
Weights -0.197418 -0.180385 0.320716 0.925547 -0.016838
1 2 3 4
Weights -0.069672 -0.105255 0.324256 0.781141
1 2 3 4 5
Weights -0.091998 -0.104231 0.323973 0.780793 -0.026001
1 2 3 4
Weights -0.016895 -0.23254 0.153256 0.958345
1 2 3 4 5
Weights 0.055246 -0.232377 0.153708 0.955781 0.077088
1 2 3 4
Weights 0.022481 -0.131577 0.21244 0.760523
1 2 3 4 5
Weights 0.040805 -0.132128 0.212337 0.76002 0.020584
1 2 3 4
Weights -0.248839 -0.142512 0.462355 0.841014
1 2 3 4 5
Weights -0.291651 -0.139218 0.46089 0.83727 -0.052069
1 2 3 4
Weights 0.004716 -0.273489 0.594216 0.588842
1 2 3 4 5
Weights 0.011776 -0.273735 0.594301 0.588762 0.007675
1 2 3 4
Weights -0.121799 0.023751 0.167091 0.856263
1 2 3 4 5
Weights -0.181946 0.027319 0.166757 0.848791 -0.078025
1 2 3 4
Weights -0.095559 -0.27724 0.229249 1.051423
1 2 3 4 5
Weights -0.110739 -0.27712 0.228846 1.051571 -0.018217
1 2 3 4
Weights -0.124513 -0.065585 0.129546 0.959842
1 2 3 4 5
Weights -0.154865 -0.06504 0.129393 0.959071 -0.035636
1 2 3 4
Weights -0.039269 -0.211149 0.208379 0.955195
1 2 3 4 5
Weights -0.062426 -0.210915 0.208096 0.955093 -0.025593
1 2 3 4
Weights -0.15437 -0.128932 0.363735 0.810107
1 2 3 4 5
Weights -0.237043 -0.125829 0.362826 0.806799 -0.091504
1 2 3 4
Weights -0.176825 0.003974 0.257393 0.82991
1 2 3 4 5
Weights -0.341663 0.024257 0.249462 0.770446 -0.223006
1 2 3 4
Weights 0.245072 -0.111408 0.129108 0.683725
1 2 3 4 5
Weights 0.131947 -0.105684 0.131013 0.681862 -0.13225
1 2 3 4
Weights -0.053653 -0.156746 0.180478 0.906685
1 2 3 4 5
Weights -0.043934 -0.156846 0.180515 0.906503 0.010697
1 2 3 4
Weights -0.063273 -0.211343 0.440294 0.752009
1 2 3 4 5
Weights -0.138366 -0.205934 0.437263 0.746489 -0.089412
1 2 3 4
Weights -0.040329 -0.014287 0.176309 0.811266
1 2 3 4 5
Weights -0.011434 -0.015553 0.175737 0.809839 0.034871
1 2 3 4
Weights -0.049439 -0.244092 0.255596 0.968896
1 2 3 4 5
Weights -0.065821 -0.242977 0.253946 0.96792 -0.026363
1 2 3 4
Weights -0.032799 -0.357149 0.425715 0.911441
1 2 3 4 5
Weights -0.024769 -0.357264 0.426217 0.911257 0.009736
1 2 3 4
Weights -0.195576 -0.099886 0.398822 0.794825
1 2 3 4 5
Weights -0.185943 -0.100364 0.398941 0.794716 0.011522
1 2 3 4
Weights -0.037229 -0.371153 0.460452 0.836582
1 2 3 4 5
Weights 0.022403 -0.37152 0.461874 0.832407 0.06769
1 2 3 4
Weights -0.187042 -0.108681 0.179253 1.012636
1 2 3 4 5
Weights -0.226368 -0.107635 0.179089 1.011429 -0.045991
1 2 3 4
Weights -0.068056 -0.07185 0.216421 0.842565
1 2 3 4 5
Weights -0.030465 -0.072651 0.216522 0.84061 0.044013
1 2 3 4
Weights 0.04445 0.063898 0.055683 0.763813
1 2 3 4 5
Weights 0.004057 0.06563 0.056862 0.763488 -0.046999
1 2 3 4
Weights -0.180262 -0.021293 0.266451 0.829585
1 2 3 4 5
Weights -0.172962 -0.021637 0.266465 0.829539 0.008891
1 2 3 4
Weights -0.224734 -0.079961 0.329663 0.877935
1 2 3 4 5
Weights -0.247913 -0.0788 0.329232 0.876941 -0.028419
1 2 3 4
Weights -0.02929 -0.015031 0.279392 0.715913
1 2 3 4 5
Weights -0.034439 -0.014466 0.279493 0.715941 -0.006774
1 2 3 4
Weights -0.026334 -0.363024 0.227977 1.061936
1 2 3 4 5
Weights 0.018369 -0.361428 0.230156 1.056926 0.053362
1 2 3 4
Weights 0.002243 -0.027064 0.152631 0.79091
1 2 3 4 5
Weights -0.03036 -0.025953 0.152899 0.790583 -0.037283
1 2 3 4
Weights 0.057708 -0.333153 0.392261 0.839584
1 2 3 4 5
Weights 0.093556 -0.334097 0.393361 0.836645 0.042923
1 2 3 4
Weights -0.049498 -0.313568 0.177703 1.071589
1 2 3 4 5
Weights 0.038831 -0.310355 0.180451 1.06213 0.09907
1 2 3 4
Weights -0.06031 -0.040706 0.315503 0.662942
1 2 3 4 5
Weights -0.006852 -0.043462 0.314584 0.660598 0.061484
1 2 3 4
Weights -0.07099 -0.313046 0.515653 0.799314
1 2 3 4 5
Weights -0.012639 -0.313513 0.516711 0.796612 0.063989
1 2 3 4
Weights -0.006761 0.031234 0.114174 0.710793
1 2 3 4 5
Weights 0.008518 0.030843 0.113942 0.710579 0.016713
1 2 3 4
Weights -0.00267 -0.395135 0.335507 0.973531
1 2 3 4 5
Weights 0.039943 -0.394562 0.337327 0.969921 0.049994
1 2 3 4
Weights -0.145198 -0.114883 0.197498 0.956512
1 2 3 4 5
Weights -0.135219 -0.114991 0.197594 0.956283 0.011552
1 2 3 4
Weights -0.029364 -0.206871 0.378952 0.747648
1 2 3 4 5
Weights -0.043097 -0.206332 0.378807 0.747714 -0.015562
1 2 3 4
Weights -0.030138 -0.321158 0.504516 0.778814
1 2 3 4 5
Weights 0.009998 -0.321913 0.505519 0.776491 0.04591
1 2 3 4
Weights -0.131325 -0.153309 0.135789 1.04814
1 2 3 4 5
Weights -0.111649 -0.153203 0.135991 1.047551 0.021741
1 2 3 4
Weights -0.17053 -0.024801 0.203526 0.921763
1 2 3 4 5
Weights -0.235999 -0.018637 0.201496 0.905526 -0.093366
1 2 3 4
Weights -0.078537 -0.147354 0.07966 1.073456
1 2 3 4 5
Weights -0.06607 -0.147269 0.07978 1.073302 0.01345
1 2 3 4
Weights -0.035369 -0.413796 0.355409 0.991736
1 2 3 4 5
Weights -0.001209 -0.413272 0.356882 0.98874 0.040301
1 2 3 4
Weights -0.072281 -0.208971 0.047599 1.136149
1 2 3 4 5
Weights -0.042416 -0.208116 0.048388 1.133981 0.034896
1 2 3 4
Weights -0.144512 0.028178 0.14538 0.871143
1 2 3 4 5
Weights -0.125799 0.027697 0.145263 0.870865 0.02147
1 2 3 4
Weights -0.142803 -0.1101 0.340716 0.799687
1 2 3 4 5
Weights -0.13858 -0.110288 0.340762 0.799646 0.005048
1 2 3 4
Weights -0.124621 -0.258389 0.456455 0.807841
1 2 3 4 5
Weights -0.089404 -0.259078 0.456888 0.806482 0.039449
1 2 3 4
Weights -0.124354 -0.224497 0.26651 0.990473
1 2 3 4 5
Weights -0.136286 -0.224234 0.266153 0.990504 -0.01493
1 2 3 4
Weights -0.190709 -0.005547 0.131807 0.982056
1 2 3 4 5
Weights -0.181817 -0.005611 0.131857 0.981991 0.010236
1 2 3 4
Weights -0.077748 0.023033 0.323605 0.705532
1 2 3 4 5
Weights -0.137826 0.023816 0.323342 0.704415 -0.062332
1 2 3 4
Weights -0.035876 -0.18452 0.314834 0.865413
1 2 3 4 5
Weights -0.003058 -0.185689 0.315579 0.861842 0.041945
1 2 3 4
Weights -0.047262 -0.002229 0.226202 0.70838
1 2 3 4 5
Weights 0.069096 -0.008366 0.221679 0.695182 0.137106
1 2 3 4
Weights 0.06832 -0.209011 0.25416 0.776763
1 2 3 4 5
Weights 0.001505 -0.205208 0.253222 0.774734 -0.080438
1 2 3 4
Weights -0.113635 -0.131823 0.146742 0.998626
1 2 3 4 5
Weights -0.127781 -0.131668 0.146553 0.998623 -0.016929
1 2 3 4
Weights -0.228127 -0.106717 0.231812 1.012897
1 2 3 4 5
Weights -0.238978 -0.106524 0.231699 1.012746 -0.012985
1 2 3 4
Weights -0.035312 -0.445716 0.497934 0.925608
1 2 3 4 5
Weights 0.031066 -0.443824 0.500541 0.920093 0.073379
1 2 3 4
Weights -0.142696 0.205387 0.273841 0.485771
1 2 3 4 5
Weights -0.116615 0.204657 0.273477 0.485705 0.02744
1 2 3 4
Weights -0.05705 -0.229491 0.407891 0.805639
1 2 3 4 5
Weights -0.099808 -0.225465 0.404886 0.801931 -0.056234
1 2 3 4
Weights -0.19431 -0.187309 0.349044 0.965446
1 2 3 4 5
Weights -0.167259 -0.187173 0.349836 0.96472 0.030025
1 2 3 4
Weights -0.118332 -0.061904 0.073043 1.051207
1 2 3 4 5
Weights -0.085687 -0.061642 0.073684 1.049612 0.038494
1 2 3 4
Weights 0.02993 -0.395992 0.469445 0.844986
1 2 3 4 5
Weights 0.083528 -0.396568 0.470382 0.841345 0.05975
1 2 3 4
Weights -0.106968 -0.078205 0.389156 0.667093
1 2 3 4 5
Weights -0.295731 -0.070312 0.386915 0.656128 -0.20451
1 2 3 4
Weights -0.078667 0.038441 0.139088 0.796303
1 2 3 4 5
Weights -0.051655 0.037765 0.13881 0.795836 0.029961
1 2 3 4
Weights 0.056119 0.025977 0.18918 0.671473
1 2 3 4 5
Weights 0.038903 0.026937 0.189561 0.671596 -0.019865
1 2 3 4
Weights -0.046847 -0.174123 0.269078 0.860805
1 2 3 4 5
Weights -0.037717 -0.17443 0.269216 0.860493 0.011092
1 2 3 4
Weights 0.052456 -0.173319 0.358314 0.687485
1 2 3 4 5
Weights 0.006621 -0.169753 0.357577 0.686273 -0.055076
1 2 3 4
Weights -0.157725 0.068374 0.119818 0.880075
1 2 3 4 5
Weights -0.243512 0.073768 0.120628 0.86479 -0.113235
1 2 3 4
Weights -0.015844 0.003139 0.041231 0.858716
1 2 3 4 5
Weights 0.041551 0.001143 0.039506 0.85432 0.06861
1 2 3 4
Weights -0.011016 -0.191651 0.379528 0.746708
1 2 3 4 5
Weights 0.036789 -0.194117 0.380027 0.741387 0.060586
1 2 3 4
Weights -0.078963 -0.334799 0.335658 0.972491
1 2 3 4 5
Weights -0.065913 -0.334879 0.335973 0.971913 0.015001
1 2 3 4
Weights -0.134622 -0.24039 0.262206 1.015514
1 2 3 4 5
Weights -0.208818 -0.237991 0.262084 1.012438 -0.083116
1 2 3 4
Weights -0.001953 -0.096623 0.319345 0.715625
1 2 3 4 5
Weights -0.020498 -0.09579 0.319251 0.715542 -0.021264
1 2 3 4
Weights -0.157859 -0.320383 0.430045 0.96115
1 2 3 4 5
Weights -0.184024 -0.319325 0.429287 0.960393 -0.030803
Predicting t+4...
t+1 t+2 t+3 t+4
R2 0.962129 -7.879305e+12 -6.395966e+04 0.552413
RMSE 70.932832 1.023147e+09 9.218291e+04 243.855585
MSE 5031.466647 1.046829e+18 8.497689e+09 59465.546474
MAE 0.398008 4.361101e+05 9.653635e+01 1.111711
MAPE 39.800825 4.361101e+07 9.653635e+03 111.171106
MPE -25.024504 -3.594910e+07 4.344018e+03 -74.163403
CPU times: user 4min 36s, sys: 20.2 s, total: 4min 56s
Wall time: 3min 14s
In [106]:
plot_diff_along_time(X_test, arma_preds_s)
In [107]:
j, s = 2, 100
plot_bispecific(X_test, baseline_preds, arma_preds, arma_preds_s, order, limit_t, j, s)
In [108]:
plot_qualitative_analysis(arma_preds, X_test, limit_t, order, subway_stations, del_hours)
In [109]:
plot_qualitative_analysis(arma_preds_s, X_test, limit_t, order, subway_stations, del_hours)
In [110]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 'None'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(arma_scores).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="ARMA")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="General baseline")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of General baseline and AR model, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);
In [111]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 's'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(arma_scores_s).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="ARMA")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="Baseline per station")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

Compute and Compare

In [112]:
fig, ax = plt.subplots(1, figsize=(16, 6))
ax.set_prop_cycle(color=['crimson', 'teal', 'b', 'darkgoldenrod'])
x = range(1, limit_t+1)
baseline_scores = df_baseline_scores.loc['RMSE'].values.repeat(limit_t).reshape(-1, limit_t).T
model_scores = np.vstack((np.array(arma_scores).T[1], np.array(arma_scores_s).T[1])).T
baselineObjects = plt.plot(x, baseline_scores, linewidth=3)
labels = ["General baseline", "Baseline per station", "AR per t", "AR per station per t"]
arlineObjects = plt.plot(x, model_scores, linewidth=3)

# ['D', '*', '|', 'X']
# labels = ["Full baseline", "Baseline per station", "Baseline per day",
#                    "Baseline per station and day", "Full AR", "AR per station"]
for i, m in zip(range(4), ['D', '*']):
    ax = plt.scatter(x, baseline_scores[:, i], marker=m, s=100)

for i, m in zip(range(2), ['D', '*']):
    ax = plt.scatter(x, model_scores[:, i], marker=m, s=100)
    
plt.legend(baselineObjects+arlineObjects, labels, prop={'size': 15})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

Recurrent Neural Networks

In [113]:
class UnivariateRNN(nn.modules):
    def __init__(self):
        pass
--------------------------------------
NameErrorTraceback (most recent call last)
<ipython-input-113-fb065dc06834> in <module>()
----> 1 class UnivariateRNN(nn.modules):
      2     def __init__(self):
      3         pass

NameError: name 'nn' is not defined

Brouillon

In [ ]:
ar_preds_s
In [ ]:
arb_preds[0].minor_axis
In [ ]:
len(generate_times("15min")[(del_hours * 4) + 4:])